mirror of https://github.com/sgoudham/Enso-Bot.git
Cool
commit 9fa7626672
@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/venv" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6 (EnsoBot)" project-jdk-type="Python SDK" />
</project>
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/EnsoBot.iml" filepath="$PROJECT_DIR$/.idea/EnsoBot.iml" />
    </modules>
  </component>
</project>
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
@@ -0,0 +1,52 @@
import discord
from discord.ext import commands

client = commands.Bot(command_prefix=';')


@client.event
async def on_ready():
    print('Bot is ready.')
    await client.change_presence(activity=discord.Game(name='With Tiddies'))


@client.event
async def on_member_join(member):
    print(f'{member} has joined the server')


@client.event
async def on_member_remove(member):
    # discord.py fires on_member_remove when a member leaves; the original
    # name on_member_removed is never called by the library.
    print(f'{member} has left the server')


@client.command(aliases=["ping"])
@commands.has_any_role('Hamothy')
async def Ping(ctx):
    await ctx.send(f'Pong! {round(client.latency * 1000)}ms')


# Commented-out 8ball command; it needs `import random` at the top when enabled.
# @client.command(aliases=['8ball'])
# async def _8ball(ctx, *, question):
#     responses = ["Hamothy believes it is certain",
#                  "Kate decides it will come true",
#                  "Josh doesn't believe.",
#                  "Izzy can't predict this",
#                  "Idk idiot lmao",
#                  "Why are you even askin me",
#                  "its not like i can read ur question"]
#     await ctx.send(f'Question: {question}\nAnswer: {random.choice(responses)}')


@client.command()
@commands.has_any_role('Hamothy')
async def roles(ctx):
    # Embed explaining the levelled-roles system.
    embed = discord.Embed(title="```So you wanna know how the leveled roles system works huh?```",
                          colour=discord.Colour(0x30e419),
                          description="------------------------------------------------")

    embed.set_image(url="https://media.discordapp.net/attachments/669812887564320769/717149671771996180/unknown.png")
    embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/683490529862090814/715010931620446269/image1.jpg")
    embed.set_author(name="Hamothy", icon_url="https://cdn.discordapp.com/attachments/689525645734182916/717137453651066900/Rias_Gremory.png")
    embed.set_footer(text="-------------------------------------------------------------------------------------------------------")

    embed.add_field(name="Cooldown", value="**•XP is gained every time you talk with a 2 minute cooldown.**", inline=True)
    embed.add_field(name="Message Length", value="**•XP is not determined by the size of the message. You will not get more XP just because the message is bigger.**", inline=True)
    embed.add_field(name="Roles", value="**•As seen below, those are the colours and roles that will be achieved upon gaining that amount of experience**", inline=True)

    await ctx.send(embed=embed)


# Bot token hardcoded in this commit.
client.run('NzE2NzAxNjk5MTQ1NzI4MDk0.XtWFiw.KZrh9Tkp9vTY9JYSgZfpg2P4mlQ')
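For reference, a minimal runnable version of the commented-out 8ball command above could look like the sketch below. It is not part of the original commit: it assumes `import random` is added alongside the other imports and that the command is defined before the final `client.run(...)` call.

import random

@client.command(aliases=['8ball'])
async def _8ball(ctx, *, question):
    # Echo the question back with one canned answer chosen at random.
    responses = ["Hamothy believes it is certain",
                 "Kate decides it will come true",
                 "Josh doesn't believe.",
                 "Idk idiot lmao"]
    await ctx.send(f'Question: {question}\nAnswer: {random.choice(responses)}')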
|
@@ -0,0 +1,5 @@
git+https://github.com/Rapptz/discord.py@rewrite
dnspython==1.16.0
PyNaCl==1.3.0
certifi
async-timeout==3.0.1
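These pinned dependencies are typically installed in one step with `pip install -r requirements.txt` from inside the project's virtual environment (the venv/ folder excluded in the IDE config above).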
|
@@ -0,0 +1 @@
pip
@ -0,0 +1,174 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
@ -0,0 +1,196 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: PyNaCl
|
||||||
|
Version: 1.3.0
|
||||||
|
Summary: Python binding to the Networking and Cryptography (NaCl) library
|
||||||
|
Home-page: https://github.com/pyca/pynacl/
|
||||||
|
Author: The PyNaCl developers
|
||||||
|
Author-email: cryptography-dev@python.org
|
||||||
|
License: Apache License 2.0
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Classifier: Programming Language :: Python :: 2
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.4
|
||||||
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Provides-Extra: tests
|
||||||
|
Provides-Extra: docs
|
||||||
|
Requires-Dist: six
|
||||||
|
Requires-Dist: cffi (>=1.4.1)
|
||||||
|
Provides-Extra: docs
|
||||||
|
Requires-Dist: sphinx (>=1.6.5); extra == 'docs'
|
||||||
|
Requires-Dist: sphinx-rtd-theme; extra == 'docs'
|
||||||
|
Provides-Extra: tests
|
||||||
|
Requires-Dist: pytest (!=3.3.0,>=3.2.1); extra == 'tests'
|
||||||
|
Requires-Dist: hypothesis (>=3.27.0); extra == 'tests'
|
||||||
|
|
||||||
|
===============================================
|
||||||
|
PyNaCl: Python binding to the libsodium library
|
||||||
|
===============================================
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/pypi/v/pynacl.svg
|
||||||
|
:target: https://pypi.org/project/PyNaCl/
|
||||||
|
:alt: Latest Version
|
||||||
|
|
||||||
|
.. image:: https://travis-ci.org/pyca/pynacl.svg?branch=master
|
||||||
|
:target: https://travis-ci.org/pyca/pynacl
|
||||||
|
|
||||||
|
.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=master
|
||||||
|
:target: https://codecov.io/github/pyca/pynacl?branch=master
|
||||||
|
|
||||||
|
PyNaCl is a Python binding to `libsodium`_, which is a fork of the
|
||||||
|
`Networking and Cryptography library`_. These libraries have a stated goal of
|
||||||
|
improving usability, security and speed. It supports Python 2.7 and 3.4+ as
|
||||||
|
well as PyPy 2.6+.
|
||||||
|
|
||||||
|
.. _libsodium: https://github.com/jedisct1/libsodium
|
||||||
|
.. _Networking and Cryptography library: https://nacl.cr.yp.to/
|
||||||
|
|
||||||
|
Features
--------

* Digital signatures
* Secret-key encryption
* Public-key encryption
* Hashing and message authentication
* Password based key derivation and password hashing
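
To make the secret-key encryption item in the list above concrete, a minimal usage sketch with PyNaCl's ``SecretBox`` (illustrative, not part of the original README excerpt) looks like this:

.. code-block:: python

    import nacl.secret
    import nacl.utils

    # A random 32-byte key; in practice it must be stored or derived securely.
    key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
    box = nacl.secret.SecretBox(key)

    encrypted = box.encrypt(b"hello world")   # a random nonce is generated automatically
    plaintext = box.decrypt(encrypted)
    assert plaintext == b"hello world"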
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
Binary wheel install
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
PyNaCl ships as a binary wheel on OS X, Windows and Linux ``manylinux1`` [#many]_ ,
|
||||||
|
so all dependencies are included. Make sure you have an up-to-date pip
|
||||||
|
and run:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ pip install pynacl
|
||||||
|
|
||||||
|
Linux source build
|
||||||
|
------------------
|
||||||
|
|
||||||
|
PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled
|
||||||
|
with PyNaCl so to install you can run:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ pip install pynacl
|
||||||
|
|
||||||
|
If you'd prefer to use the version of ``libsodium`` provided by your
|
||||||
|
distribution, you can disable the bundled copy during install by running:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ SODIUM_INSTALL=system pip install pynacl
|
||||||
|
|
||||||
|
.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools
|
||||||
|
is generally discouraged, and is completely unsupported in PyNaCl's case.
|
||||||
|
|
||||||
|
.. _libsodium: https://github.com/jedisct1/libsodium
|
||||||
|
|
||||||
|
.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_
|
||||||
|
are built on a baseline linux environment based on Centos 5.11
|
||||||
|
and should work on most x86 and x86_64 glibc based linux environments.
|
||||||
|
|
||||||
|
Changelog
|
||||||
|
=========
|
||||||
|
|
||||||
|
1.3.0 2018-09-26
|
||||||
|
----------------
|
||||||
|
|
||||||
|
* Added support for Python 3.7.
|
||||||
|
* Update ``libsodium`` to 1.0.16.
|
||||||
|
* Run and test all code examples in PyNaCl docs through sphinx's
|
||||||
|
doctest builder.
|
||||||
|
* Add low-level bindings for chacha20-poly1305 AEAD constructions.
|
||||||
|
* Add low-level bindings for the chacha20-poly1305 secretstream constructions.
|
||||||
|
* Add low-level bindings for ed25519ph pre-hashed signing construction.
|
||||||
|
* Add low-level bindings for constant-time increment and addition
|
||||||
|
on fixed-precision big integers represented as little-endian
|
||||||
|
byte sequences.
|
||||||
|
* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API.
|
||||||
|
* Add low-level bindings for libsodium's crypto_kx... key exchange
|
||||||
|
construction.
|
||||||
|
* Set hypothesis deadline to None in tests/test_pwhash.py to avoid
|
||||||
|
incorrect test failures on slower processor architectures. GitHub
|
||||||
|
issue #370
|
||||||
|
|
||||||
|
1.2.1 - 2017-12-04
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* Update hypothesis minimum allowed version.
|
||||||
|
* Infrastructure: add proper configuration for readthedocs builder
|
||||||
|
runtime environment.
|
||||||
|
|
||||||
|
1.2.0 - 2017-11-01
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* Update ``libsodium`` to 1.0.15.
|
||||||
|
* Infrastructure: add jenkins support for automatic build of
|
||||||
|
``manylinux1`` binary wheels
|
||||||
|
* Added support for ``SealedBox`` construction.
|
||||||
|
* Added support for ``argon2i`` and ``argon2id`` password hashing constructs
|
||||||
|
and restructured high-level password hashing implementation to expose
|
||||||
|
the same interface for all hashers.
|
||||||
|
* Added support for 128 bit ``siphashx24`` variant of ``siphash24``.
|
||||||
|
* Added support for ``from_seed`` APIs for X25519 keypair generation.
|
||||||
|
* Dropped support for Python 3.3.
|
||||||
|
|
||||||
|
1.1.2 - 2017-03-31
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* reorder link time library search path when using bundled
|
||||||
|
libsodium
|
||||||
|
|
||||||
|
1.1.1 - 2017-03-15
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* Fixed a circular import bug in ``nacl.utils``.
|
||||||
|
|
||||||
|
1.1.0 - 2017-03-14
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* Dropped support for Python 2.6.
|
||||||
|
* Added ``shared_key()`` method on ``Box``.
|
||||||
|
* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or
|
||||||
|
``SecretBox`` and it will automatically generate a random nonce.
|
||||||
|
* Added support for ``siphash24``.
|
||||||
|
* Added support for ``blake2b``.
|
||||||
|
* Added support for ``scrypt``.
|
||||||
|
* Update ``libsodium`` to 1.0.11.
|
||||||
|
* Default to the bundled ``libsodium`` when compiling.
|
||||||
|
* All raised exceptions are defined mixing-in
|
||||||
|
``nacl.exceptions.CryptoError``
|
||||||
|
|
||||||
|
1.0.1 - 2016-01-24
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* Fix an issue with absolute paths that prevented the creation of wheels.
|
||||||
|
|
||||||
|
1.0 - 2016-01-23
|
||||||
|
----------------
|
||||||
|
|
||||||
|
* PyNaCl has been ported to use the new APIs available in cffi 1.0+.
|
||||||
|
Due to this change we no longer support PyPy releases older than 2.6.
|
||||||
|
* Python 3.2 support has been dropped.
|
||||||
|
* Functions to convert between Ed25519 and Curve25519 keys have been added.
|
||||||
|
|
||||||
|
0.3.0 - 2015-03-04
|
||||||
|
------------------
|
||||||
|
|
||||||
|
* The low-level API (`nacl.c.*`) has been changed to match the
|
||||||
|
upstream NaCl C/C++ conventions (as well as those of other NaCl bindings).
|
||||||
|
The order of arguments and return values has changed significantly. To
|
||||||
|
avoid silent failures, `nacl.c` has been removed, and replaced with
|
||||||
|
`nacl.bindings` (with the new argument ordering). If you have code which
|
||||||
|
calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review
|
||||||
|
the new docstrings and update your code/imports to match the new
|
||||||
|
conventions.
|
||||||
|
|
||||||
|
|
@ -0,0 +1,65 @@
|
|||||||
|
PyNaCl-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
PyNaCl-1.3.0.dist-info/LICENSE.txt,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694
|
||||||
|
PyNaCl-1.3.0.dist-info/METADATA,sha256=QNwjVN_7D92iDWarZU4x9SyHe7xMd_1monZ6apytcog,6632
|
||||||
|
PyNaCl-1.3.0.dist-info/RECORD,,
|
||||||
|
PyNaCl-1.3.0.dist-info/WHEEL,sha256=xq3J6sB2oJqjv0tDC7tGUGvne1fOugw1JUmk86QA7UM,106
|
||||||
|
PyNaCl-1.3.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13
|
||||||
|
nacl/__init__.py,sha256=PS9BuXZoCwSvrDpB8HXldTHnA6lb4y00IRi3uqdW5_E,1170
|
||||||
|
nacl/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/encoding.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/exceptions.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/hash.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/hashlib.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/public.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/secret.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/signing.cpython-36.pyc,,
|
||||||
|
nacl/__pycache__/utils.cpython-36.pyc,,
|
||||||
|
nacl/_sodium.cp36-win_amd64.pyd,sha256=90tFta3cB1IcllcBe7t4P_I0GmqDNkiSkrO55zIrQ90,295424
|
||||||
|
nacl/bindings/__init__.py,sha256=dNH1zFjW87qszsld5oy6xMf2S1w2v_qshQwYHp66pz4,14943
|
||||||
|
nacl/bindings/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_aead.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_box.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_generichash.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_hash.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_kx.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_pwhash.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_scalarmult.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_secretbox.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_secretstream.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_shorthash.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/crypto_sign.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/randombytes.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/sodium_core.cpython-36.pyc,,
|
||||||
|
nacl/bindings/__pycache__/utils.cpython-36.pyc,,
|
||||||
|
nacl/bindings/crypto_aead.py,sha256=DE5zdi09GeHZxvmrhHtxVuTqF61y1cs8trTGh_6uP8Q,17335
|
||||||
|
nacl/bindings/crypto_box.py,sha256=hbHJetr9id5OvkbJwJoeqRQAhqSIGwWC2aXRAF5oPE4,9708
|
||||||
|
nacl/bindings/crypto_generichash.py,sha256=-e4b4DaopLBQHhEjLSjEoumy5fOs4QdTb-hou1S34C4,8010
|
||||||
|
nacl/bindings/crypto_hash.py,sha256=7Xp4mpXr4cpn-hAOU66KlYVUCVHP6deT0v_eW4UZZXo,2243
|
||||||
|
nacl/bindings/crypto_kx.py,sha256=2Gjxu5c7IKAwW2MOJa9zEn1EgpIVQ0tbZQs33REZb38,6937
|
||||||
|
nacl/bindings/crypto_pwhash.py,sha256=lWhEFKmXzFhKnzzxtWDwozs0CseZDkGgTJaI4YQ5rak,16898
|
||||||
|
nacl/bindings/crypto_scalarmult.py,sha256=VA2khmlUrnR24KK0CAdDw2dQ0jiYkku9-_NA-f1p21c,1803
|
||||||
|
nacl/bindings/crypto_secretbox.py,sha256=luvzB3lwBwXxKm63e9nA2neGtOXeeG8R9SyWEckIqdI,2864
|
||||||
|
nacl/bindings/crypto_secretstream.py,sha256=gdKinW10jP3CZ51hanE40s6e39rz8iuajdXTSBSKVcM,10474
|
||||||
|
nacl/bindings/crypto_shorthash.py,sha256=eVUE8byB1RjI0AoHib5BdZSSLtSqtdIcHgPCPWf2OZM,2189
|
||||||
|
nacl/bindings/crypto_sign.py,sha256=uA0RdHM4vsBDNhph2f7fcuI_9K8vvW-4hNHjajTIVU0,9641
|
||||||
|
nacl/bindings/randombytes.py,sha256=eThts6s-9xBXOl3GNzT57fV1dZUhzPjjAmAVIUHfcrc,988
|
||||||
|
nacl/bindings/sodium_core.py,sha256=52z0K7y6Ge6IlXcysWDVN7UdYcTOij6v0Cb0OLo8_Qc,1079
|
||||||
|
nacl/bindings/utils.py,sha256=jOKsDbsjxN9v_HI8DOib72chyU3byqbynXxbiV909-g,4420
|
||||||
|
nacl/encoding.py,sha256=tOiyIQVVpGU6A4Lzr0tMuqomhc_Aj0V_c1t56a-ZtPw,1928
|
||||||
|
nacl/exceptions.py,sha256=SG0BNtXnzmppI9in6xMTSizh1ryfgUIvIVMQv_A0bs8,1858
|
||||||
|
nacl/hash.py,sha256=4DKlmqpWOZJLhzTPk7_JSGXQ32lJULsS3AzJCGsibus,5928
|
||||||
|
nacl/hashlib.py,sha256=gMxOu-lIlKYr3ywSCjsJRBksYgpU2dvXgaAEfQz7PEg,3909
|
||||||
|
nacl/public.py,sha256=-nwQof5ov-wSSdvvoXh-FavTtjfpRnYykZkatNKyLd0,13442
|
||||||
|
nacl/pwhash/__init__.py,sha256=CN0mP6yteSYp3ui-DyWR1vjULNrXVN_gQ72CmTPoao0,2695
|
||||||
|
nacl/pwhash/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
nacl/pwhash/__pycache__/_argon2.cpython-36.pyc,,
|
||||||
|
nacl/pwhash/__pycache__/argon2i.cpython-36.pyc,,
|
||||||
|
nacl/pwhash/__pycache__/argon2id.cpython-36.pyc,,
|
||||||
|
nacl/pwhash/__pycache__/scrypt.cpython-36.pyc,,
|
||||||
|
nacl/pwhash/_argon2.py,sha256=Eu3-juLws3_v1gNy5aeSVPEwuRVFdGOrfeF0wPH9VHA,1878
|
||||||
|
nacl/pwhash/argon2i.py,sha256=EpheK0UHJvZYca_EMhhOcX5GXaOr0xCjFDTIgmSCSDo,4598
|
||||||
|
nacl/pwhash/argon2id.py,sha256=IqNm5RQNEd1Z9F-bEWT-_Y9noU26QoTR5YdWONg1uuI,4610
|
||||||
|
nacl/pwhash/scrypt.py,sha256=F9iUKbzZUMG2ZXuuk70p4KXI_nItue3VA39zmwOESE8,6025
|
||||||
|
nacl/secret.py,sha256=jf4WuUjnnXTekZ2elGgQozZl6zGzxGY_0Nw0fwehUlg,5430
|
||||||
|
nacl/signing.py,sha256=ZwA1l31ZgOIw_sAjiUPkzEo07uYYi8SE7Ni0G_R8ksQ,7302
|
||||||
|
nacl/utils.py,sha256=hhmIriBM7Bwyh3beTrqVqDDucai5gXlSliAMVrxIHPI,1691
|
@ -0,0 +1,5 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.31.1)
|
||||||
|
Root-Is-Purelib: false
|
||||||
|
Tag: cp36-cp36m-win_amd64
|
||||||
|
|
@ -0,0 +1,2 @@
|
|||||||
|
_sodium
|
||||||
|
nacl
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1 @@
|
|||||||
|
pip
|
@ -0,0 +1,201 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
@ -0,0 +1,652 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: aiohttp
|
||||||
|
Version: 3.6.2
|
||||||
|
Summary: Async http client/server framework (asyncio)
|
||||||
|
Home-page: https://github.com/aio-libs/aiohttp
|
||||||
|
Author: Nikolay Kim
|
||||||
|
Author-email: fafhrd91@gmail.com
|
||||||
|
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||||
|
Maintainer-email: aio-libs@googlegroups.com
|
||||||
|
License: Apache 2
|
||||||
|
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
|
||||||
|
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
|
||||||
|
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
|
||||||
|
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
|
||||||
|
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
|
||||||
|
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
|
||||||
|
Project-URL: Docs: RTD, https://docs.aiohttp.org
|
||||||
|
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
|
||||||
|
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: License :: OSI Approved :: Apache Software License
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Operating System :: POSIX
|
||||||
|
Classifier: Operating System :: MacOS :: MacOS X
|
||||||
|
Classifier: Operating System :: Microsoft :: Windows
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP
|
||||||
|
Classifier: Framework :: AsyncIO
|
||||||
|
Requires-Python: >=3.5.3
|
||||||
|
Requires-Dist: attrs (>=17.3.0)
|
||||||
|
Requires-Dist: chardet (<4.0,>=2.0)
|
||||||
|
Requires-Dist: multidict (<5.0,>=4.5)
|
||||||
|
Requires-Dist: async-timeout (<4.0,>=3.0)
|
||||||
|
Requires-Dist: yarl (<2.0,>=1.0)
|
||||||
|
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
|
||||||
|
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
|
||||||
|
Provides-Extra: speedups
|
||||||
|
Requires-Dist: aiodns ; extra == 'speedups'
|
||||||
|
Requires-Dist: brotlipy ; extra == 'speedups'
|
||||||
|
Requires-Dist: cchardet ; extra == 'speedups'
|
||||||
|
|
||||||
|
==================================
|
||||||
|
Async http client/server framework
|
||||||
|
==================================
|
||||||
|
|
||||||
|
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
|
||||||
|
:height: 64px
|
||||||
|
:width: 64px
|
||||||
|
:alt: aiohttp logo
|
||||||
|
|
||||||
|
|
|
||||||
|
|
||||||
|
.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
|
||||||
|
:target: https://travis-ci.com/aio-libs/aiohttp
|
||||||
|
:align: right
|
||||||
|
:alt: Travis status for master branch
|
||||||
|
|
||||||
|
.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
|
||||||
|
:target: https://ci.appveyor.com/project/aio-libs/aiohttp
|
||||||
|
:align: right
|
||||||
|
:alt: AppVeyor status for master branch
|
||||||
|
|
||||||
|
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
|
||||||
|
:target: https://codecov.io/gh/aio-libs/aiohttp
|
||||||
|
:alt: codecov.io status for master branch
|
||||||
|
|
||||||
|
.. image:: https://badge.fury.io/py/aiohttp.svg
|
||||||
|
:target: https://pypi.org/project/aiohttp
|
||||||
|
:alt: Latest PyPI package version
|
||||||
|
|
||||||
|
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
|
||||||
|
:target: https://docs.aiohttp.org/
|
||||||
|
:alt: Latest Read The Docs
|
||||||
|
|
||||||
|
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
||||||
|
:target: https://gitter.im/aio-libs/Lobby
|
||||||
|
:alt: Chat on Gitter
|
||||||
|
|
||||||
|
Key Features
============

- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
  Callback Hell.
- Provides Web-server with middlewares and pluggable routing.
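
To illustrate the middleware item in the list above, a minimal middleware that adds a response header (an illustrative sketch, not from the original README) can be registered like this:

.. code-block:: python

    from aiohttp import web

    @web.middleware
    async def add_server_header(request, handler):
        # Run the matched handler, then decorate its response.
        response = await handler(request)
        response.headers['X-Served-By'] = 'aiohttp'
        return response

    app = web.Application(middlewares=[add_server_header])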
|
||||||
|
|
||||||
|
|
||||||
|
Getting started
|
||||||
|
===============
|
||||||
|
|
||||||
|
Client
|
||||||
|
------
|
||||||
|
|
||||||
|
To get something from the web:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
async def fetch(session, url):
|
||||||
|
async with session.get(url) as response:
|
||||||
|
return await response.text()
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
html = await fetch(session, 'http://python.org')
|
||||||
|
print(html)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
loop = asyncio.get_event_loop()
|
||||||
|
loop.run_until_complete(main())
|
||||||
|
|
||||||
|
|
||||||
|
Server
|
||||||
|
------
|
||||||
|
|
||||||
|
An example using a simple server:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# examples/server_simple.py
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
async def handle(request):
|
||||||
|
name = request.match_info.get('name', "Anonymous")
|
||||||
|
text = "Hello, " + name
|
||||||
|
return web.Response(text=text)
|
||||||
|
|
||||||
|
async def wshandle(request):
|
||||||
|
ws = web.WebSocketResponse()
|
||||||
|
await ws.prepare(request)
|
||||||
|
|
||||||
|
async for msg in ws:
|
||||||
|
if msg.type == web.WSMsgType.text:
|
||||||
|
await ws.send_str("Hello, {}".format(msg.data))
|
||||||
|
elif msg.type == web.WSMsgType.binary:
|
||||||
|
await ws.send_bytes(msg.data)
|
||||||
|
elif msg.type == web.WSMsgType.close:
|
||||||
|
break
|
||||||
|
|
||||||
|
return ws
|
||||||
|
|
||||||
|
|
||||||
|
app = web.Application()
|
||||||
|
app.add_routes([web.get('/', handle),
|
||||||
|
web.get('/echo', wshandle),
|
||||||
|
web.get('/{name}', handle)])
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
web.run_app(app)
|
||||||
|
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
=============
|
||||||
|
|
||||||
|
https://aiohttp.readthedocs.io/
|
||||||
|
|
||||||
|
|
||||||
|
Demos
|
||||||
|
=====
|
||||||
|
|
||||||
|
https://github.com/aio-libs/aiohttp-demos
|
||||||
|
|
||||||
|
|
||||||
|
External links
|
||||||
|
==============
|
||||||
|
|
||||||
|
* `Third party libraries
|
||||||
|
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
|
||||||
|
* `Built with aiohttp
|
||||||
|
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
|
||||||
|
* `Powered by aiohttp
|
||||||
|
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
|
||||||
|
|
||||||
|
Feel free to make a Pull Request for adding your link to these pages!
|
||||||
|
|
||||||
|
|
||||||
|
Communication channels
|
||||||
|
======================
|
||||||
|
|
||||||
|
*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
|
||||||
|
|
||||||
|
Feel free to post your questions and ideas here.
|
||||||
|
|
||||||
|
*gitter chat* https://gitter.im/aio-libs/Lobby
|
||||||
|
|
||||||
|
We support `Stack Overflow
|
||||||
|
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
|
||||||
|
Please add *aiohttp* tag to your question there.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
============
|
||||||
|
|
||||||
|
- Python >= 3.5.3
|
||||||
|
- async-timeout_
|
||||||
|
- attrs_
|
||||||
|
- chardet_
|
||||||
|
- multidict_
|
||||||
|
- yarl_
|
||||||
|
|
||||||
|
Optionally you may install the cChardet_ and aiodns_ libraries (highly
|
||||||
|
recommended for sake of speed).
|
||||||
|
|
||||||
|
.. _chardet: https://pypi.python.org/pypi/chardet
|
||||||
|
.. _aiodns: https://pypi.python.org/pypi/aiodns
|
||||||
|
.. _attrs: https://github.com/python-attrs/attrs
|
||||||
|
.. _multidict: https://pypi.python.org/pypi/multidict
|
||||||
|
.. _yarl: https://pypi.python.org/pypi/yarl
|
||||||
|
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
|
||||||
|
.. _cChardet: https://pypi.python.org/pypi/cchardet
|
||||||
|
|
||||||
|
License
|
||||||
|
=======
|
||||||
|
|
||||||
|
``aiohttp`` is offered under the Apache 2 license.
|
||||||
|
|
||||||
|
|
||||||
|
Keepsafe
|
||||||
|
========
|
||||||
|
|
||||||
|
The aiohttp community would like to thank Keepsafe
|
||||||
|
(https://www.getkeepsafe.com) for its support in the early days of
|
||||||
|
the project.
|
||||||
|
|
||||||
|
|
||||||
|
Source code
|
||||||
|
===========
|
||||||
|
|
||||||
|
The latest developer version is available in a GitHub repository:
|
||||||
|
https://github.com/aio-libs/aiohttp
|
||||||
|
|
||||||
|
Benchmarks
|
||||||
|
==========
|
||||||
|
|
||||||
|
If you are interested in efficiency, the AsyncIO community maintains a
|
||||||
|
list of benchmarks on the official wiki:
|
||||||
|
https://github.com/python/asyncio/wiki/Benchmarks
|
||||||
|
|
||||||
|
=========
|
||||||
|
Changelog
|
||||||
|
=========
|
||||||
|
|
||||||
|
..
|
||||||
|
You should *NOT* be adding new change log entries to this file, this
|
||||||
|
file is managed by towncrier. You *may* edit previous change logs to
|
||||||
|
fix problems like typo corrections or such.
|
||||||
|
To add a new change log entry, please see
|
||||||
|
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
|
||||||
|
we named the news folder "changes".
|
||||||
|
|
||||||
|
WARNING: Don't drop the next directive!
|
||||||
|
|
||||||
|
.. towncrier release notes start
|
||||||
|
|
||||||
|
3.6.2 (2019-10-09)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Made exceptions pickleable. Also changed the repr of some exceptions.
|
||||||
|
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
|
||||||
|
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
|
||||||
|
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Reset the ``sock_read`` timeout each time data is received for a
|
||||||
|
``aiohttp.ClientResponse``. `#3808
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3808>`_
|
||||||
|
- Fix handling of expired cookies so they are not stored in CookieJar.
|
||||||
|
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
|
||||||
|
- Fix misleading message in the string representation of ``ClientConnectorError``;
|
||||||
|
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/4097>`_
|
||||||
|
- Don't clobber HTTP status when using FileResponse.
|
||||||
|
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Added minimal required logging configuration to logging documentation.
|
||||||
|
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
|
||||||
|
- Update docs to reflect proxy support.
|
||||||
|
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
|
||||||
|
- Fix typo in code example in testing docs.
|
||||||
|
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
|
||||||
|
|
||||||
|
|
||||||
|
Misc
|
||||||
|
----
|
||||||
|
|
||||||
|
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.6.1 (2019-09-19)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Compatibility with Python 3.8.
|
||||||
|
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- correct some exception string format
|
||||||
|
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
|
||||||
|
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
|
||||||
|
unavailable because the runtime is built against
|
||||||
|
an outdated OpenSSL.
|
||||||
|
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
|
||||||
|
- Update multidict requirement to >= 4.5
|
||||||
|
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
|
||||||
|
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.6.0 (2019-09-06)
==================

Features
--------

- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
  Proactor event loop to work. `#3629
  <https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
  compatible with more http proxy servers. `#3798
  <https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
  `#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_


Bugfixes
--------

- Raise a ClientResponseError instead of an AssertionError for a blank
  HTTP Reason Phrase.
  `#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
  `#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use 308 redirect instead of 301.

  This behavior should prevent clients from being unable to use PUT/POST
  methods on endpoints that are redirected because of a trailing slash.
  `#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
  task with unhandled exception when the server is used in conjunction with
  ``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
  Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
  `#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
  `#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
  by mypy in files that references multipart.py.
  `#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
  `#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Cleanup per-chunk data in generic data read. Memory leak fixed.
  `#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
  `#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
  `#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
  transport" exception when clients disconnect uncleanly.
  `#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
  `#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
  `#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
  `#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
  `#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
  `#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
  ``asyncio.ClientSession.request`` according to the docs `#3852
  <https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
  `#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
  View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
  missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
  `#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
  `#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
  `#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
  "localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_

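The 308 redirect issued by ``normalize_path_middleware`` (noted in the bugfix list
above) preserves the request method, so a trailing-slash redirect no longer turns a
POST into a GET. A minimal server sketch, assuming only the documented
``web.normalize_path_middleware`` helper (the route and handler names are
illustrative)::

    from aiohttp import web

    async def create_item(request: web.Request) -> web.Response:
        return web.json_response({"created": True})

    # append_slash=True redirects POST /items -> /items/ with a 308,
    # so compliant clients repeat the POST instead of downgrading to GET.
    app = web.Application(
        middlewares=[web.normalize_path_middleware(append_slash=True)]
    )
    app.add_routes([web.post("/items/", create_item)])

    if __name__ == "__main__":
        web.run_app(app)
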
Improved Documentation
----------------------

- Modify documentation for Background Tasks to remove deprecated usage of event loop.
  `#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- use ``if __name__ == '__main__':`` in server examples.
  `#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
  `#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
  `#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
  `#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_


----

3.5.4 (2019-01-12)
==================

Bugfixes
--------

- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
  partial content only in case of compressed content
  `#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_


3.5.3 (2019-01-10)
==================

Bugfixes
--------

- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
  ``access_log=True`` and the event loop being in debug mode. `#3504
  <https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
  `#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
  `#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI

----

3.5.2 (2019-01-08)
==================

Features
--------

- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
  with files asynchronously. I/O based payloads from ``payload.py`` uses a
  ``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
  <https://github.com/aio-libs/aiohttp/issues/3313>`_
- Internal Server Errors in plain text if the browser does not support HTML.
  `#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_


Bugfixes
--------

- Preserve MultipartWriter parts headers on write. Refactor the way how
  ``Payload.headers`` are handled. Payload instances now always have headers and
  Content-Type defined. Fix Payload Content-Disposition header reset after initial
  creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
  `#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
  `#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
  It allows to use Python3.7 context vars smoothly.
  `#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
  data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
  avoid a deprecation warning. `#3480
  <https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
  ``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
  `#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_


Improved Documentation
----------------------

- Add documentation for ``aiohttp.web.HTTPException``.
  `#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_


Misc
----

- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_


----

3.5.1 (2018-12-24)
====================

- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
  mode.

3.5.0 (2018-12-22)
====================

Features
--------

- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387
  <https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
  <https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
  now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
  <https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
  <https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
  to run in a background executor to avoid blocking the main thread and potentially
  triggering health check failures. (`#3205
  <https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
  <https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
  file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add default logging handler to web.run_app. If the ``Application.debug`` flag is set
  and the default logger ``aiohttp.access`` is used, access logs will now be output
  using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
  default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
  <https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
  different HTTP method for WebSocket connection establishment. For example, ``Docker
  exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
  <https://github.com/aio-libs/aiohttp/pull/3406>`_)

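The ``ClientTimeout`` support in ``aiohttp.request`` mentioned above means a one-off
request no longer needs an explicit session just to set a timeout. A small sketch
(the URL is a placeholder)::

    import asyncio
    import aiohttp

    async def main() -> None:
        # Bound the whole request/response cycle to 10 seconds.
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.request(
            "GET", "https://example.com", timeout=timeout
        ) as resp:
            body = await resp.text()
            print(resp.status, len(body))

    asyncio.run(main())
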
Bugfixes
--------

- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
  <https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
  <https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
  constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
  <https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
  <https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
  sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
  <https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
  <https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
  <https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
  collections module will not be supported anymore. (`#3273
  <https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by ``normalize_path_middleware``. (`#3278
  <https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
  <https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
  <https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
  <https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
  def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
  (`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
  <https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
  handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
  in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)


Improved Documentation
----------------------

- Improve documentation of ``Application.make_handler`` parameters. (`#3152
  <https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
  <https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
  <https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
  behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)


Deprecations and Removals
-------------------------

- Deprecate modification of ``session.requote_redirect_url`` (`#2278
  <https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
  <https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
  <https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
  <https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
  for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
  properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
  <https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
  <https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await
  connector.close()`` instead. (`#3417
  <https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
  constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)


Misc
----

- #3341, #3351

@ -0,0 +1,124 @@
|
|||||||
|
aiohttp-3.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
aiohttp-3.6.2.dist-info/LICENSE.txt,sha256=atcq6P9K6Td0Wq4oBfNDqYf6o6YGrHLGCfLUj3GZspQ,11533
|
||||||
|
aiohttp-3.6.2.dist-info/METADATA,sha256=4kebVhrza_aP2QNEcLfPESEhoVd7Jc1une-JuWJlVlE,24410
|
||||||
|
aiohttp-3.6.2.dist-info/RECORD,,
|
||||||
|
aiohttp-3.6.2.dist-info/WHEEL,sha256=uQaeujkjkt7SlmOZGXO6onhwBPrzw2WTI2otbCZzdNI,106
|
||||||
|
aiohttp-3.6.2.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
||||||
|
aiohttp/__init__.py,sha256=k5JorjbCoRvIyRSvcz-N_LFgNe1wX5HtjLCwNkC7zdY,8427
|
||||||
|
aiohttp/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/abc.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/connector.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/formdata.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/helpers.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/locks.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/log.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/multipart.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/payload.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/resolver.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/signals.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/streams.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/tracing.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_app.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_log.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_request.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_response.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_server.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/worker.cpython-36.pyc,,
|
||||||
|
aiohttp/_cparser.pxd,sha256=xvsLl13ZXXyHGyb2Us7WsLncndQrxhyGB4KXnvbsRtQ,4099
|
||||||
|
aiohttp/_find_header.c,sha256=MOZn07_ot-UcOdQBpYAWQmyigqLvMwkqa_7l4M7D1dI,199932
|
||||||
|
aiohttp/_find_header.h,sha256=HistyxY7K3xEJ53Y5xEfwrDVDkfcV0zQ9mkzMgzi_jo,184
|
||||||
|
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
|
||||||
|
aiohttp/_frozenlist.c,sha256=-vfgzV6cNjUykuqt1kkWDiT2U92BR2zhL9b9yDiiodg,288943
|
||||||
|
aiohttp/_frozenlist.cp36-win_amd64.pyd,sha256=SN72FLXG8KJYhqgT9BtULfLFhjSmvv_C-oDeQPhlpH8,79872
|
||||||
|
aiohttp/_frozenlist.pyx,sha256=SB851KmtWpiJ2ZB05Tpo4855VkCyRtgMs843Wz8kFeg,2713
|
||||||
|
aiohttp/_headers.pxi,sha256=PxiakDsuEs0O94eHRlPcerO24TqPAxc0BtX86XZL4gw,2111
|
||||||
|
aiohttp/_helpers.c,sha256=sQcHpEGAX3jEvA8jujh4_D_fev9cRjMAc5CySqtHYrg,208657
|
||||||
|
aiohttp/_helpers.cp36-win_amd64.pyd,sha256=ezuDwotCokL_pvZWHfe9kppSqetibStK3Ob727IJaGY,59904
|
||||||
|
aiohttp/_helpers.pyi,sha256=C6Q4W8EwElvD1gF1siRGMVG7evEX8fWWstZHL1BbsDA,212
|
||||||
|
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
|
||||||
|
aiohttp/_http_parser.c,sha256=W1sETtDrrBdnBiSOpqaDcO9DcE9zhyLjPTq4WKIK0bc,997494
|
||||||
|
aiohttp/_http_parser.cp36-win_amd64.pyd,sha256=E54uSiDD1EJj7fCWuOxxqGJKzvCif6HV5ewK1US3ya8,255488
|
||||||
|
aiohttp/_http_parser.pyx,sha256=C2XxooYRput7XPQzbaGMDrtvJtmhWa58SDPytyuAwGk,29577
|
||||||
|
aiohttp/_http_writer.c,sha256=-wuBZwiaUXEy1Zj-R5BD5igH7cUg_CYb5ZvYMsh8vzo,211620
|
||||||
|
aiohttp/_http_writer.cp36-win_amd64.pyd,sha256=wsDiKyfAERR76tMESHKZ9xsEABBowsdYWKjvF7xv2fs,51712
|
||||||
|
aiohttp/_http_writer.pyx,sha256=TzCawCBLMe7w9eX2SEcUcLYySwkFfrfjaEYHS0Uvjtg,4353
|
||||||
|
aiohttp/_websocket.c,sha256=JrG6bXW3OR8sfxl5V1Q3VTXvGBbFTYgzgdbhQHr3LGI,136606
|
||||||
|
aiohttp/_websocket.cp36-win_amd64.pyd,sha256=JvOl8VKDwvfhr3TDGovNSUYK_8smCphWhewuKzk4l1Y,39424
|
||||||
|
aiohttp/_websocket.pyx,sha256=Ig8jXl_wkAXPugEWS0oPYo0-BnL8zT7uBG6BrYqVXdA,1613
|
||||||
|
aiohttp/abc.py,sha256=s3wtDI3os8uX4FdQbsvJwr67cFGhylif0mR5k2SKY04,5600
|
||||||
|
aiohttp/base_protocol.py,sha256=5PJImwc0iX8kR3VjZn1D_SAeL-6JKERi87iGHEYjJQ4,2744
|
||||||
|
aiohttp/client.py,sha256=DYv-h8V2wljt4hRmPDmU2czk9zSlSn8zua9MgssSEiY,45130
|
||||||
|
aiohttp/client_exceptions.py,sha256=RCbzCGw_HcaqnL4AHf3nol32xH_2xu1hrYbLNgpjHqk,8786
|
||||||
|
aiohttp/client_proto.py,sha256=XDXJ0G9RW8m80wHahzjgp4T5S3Rf6LSYks9Q9MajSQg,8276
|
||||||
|
aiohttp/client_reqrep.py,sha256=zf6GFaDYvpy50HZ4GntrT8flcc6B4HfwnlHw_yYdGMw,37064
|
||||||
|
aiohttp/client_ws.py,sha256=OUkkw9RwRHRmAakBibE6c63VLMWGVgoyRadoC22wtNY,10995
|
||||||
|
aiohttp/connector.py,sha256=pbq2XHrujiyQXbIhzXQK6E1zrzRYedzt8xlGNmvbQcM,43672
|
||||||
|
aiohttp/cookiejar.py,sha256=lNwvnGX3BjIDU4btE50AUsBQditLXzJhsPPUMZo-dkI,12249
|
||||||
|
aiohttp/formdata.py,sha256=1yNFnS6O0wUrIL4_V66-DwyjS3nWVd0JiPIjWKbTZTs,5957
|
||||||
|
aiohttp/frozenlist.py,sha256=PSElO5biFCVHyVEr6-hXy7--cDaHAxaWDrsFxESHsFc,1853
|
||||||
|
aiohttp/frozenlist.pyi,sha256=z-EGiL4Q5MTe1wxDZINsIhqh4Eb0oT9Xn0X_Rt7C9ns,1512
|
||||||
|
aiohttp/hdrs.py,sha256=PmN2SUiMmwiC0TMEEMSFfwirUpnrzy3jwUhniPGFlmc,3549
|
||||||
|
aiohttp/helpers.py,sha256=yAdG1c-axo7-Vsf3CRaEqb7hU5Ej-FpUgZowGA76f_U,23613
|
||||||
|
aiohttp/http.py,sha256=H9xNqvagxteFvx2R7AeYiGfze7uR6VKF5IsUAITr7d4,2183
|
||||||
|
aiohttp/http_exceptions.py,sha256=Oby70EpyDmwpsb4DpCFYXw-sa856HmWv8IjeHlWWlJo,2771
|
||||||
|
aiohttp/http_parser.py,sha256=Ttk5BSX11cXMaFJmquzd1oNkZbnodghQvBgdUGdQxnE,28676
|
||||||
|
aiohttp/http_websocket.py,sha256=KmHznrwSjtpUgxbFafBg1MaAaCpxGxoK0IL8wDKg9f8,25400
|
||||||
|
aiohttp/http_writer.py,sha256=VBMPy_AaB7m_keycuu05SCN2S3GVVyY8UCHG-W86Y1w,5411
|
||||||
|
aiohttp/locks.py,sha256=6DiJHW1eQKXypu1eWXZT3_amPhFBK-jnxdI-_BpYICk,1278
|
||||||
|
aiohttp/log.py,sha256=qAQMjI6XpX3MOAZATN4HcG0tIceSreR54orlYZaoJ0A,333
|
||||||
|
aiohttp/multipart.py,sha256=RPXfp5GMauxW19nbBaLAkzgUFKTQ9eMo4XtZ7ItGyo4,33740
|
||||||
|
aiohttp/payload.py,sha256=lCF_pZvwyBKJGk4OOLYEQhtxUwOW8rsFF0pxisvfBps,14483
|
||||||
|
aiohttp/payload_streamer.py,sha256=7koj4FVujDGriDIOes48XPp5BK9tsWYyTxJG-3aNaHc,2177
|
||||||
|
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
|
||||||
|
aiohttp/pytest_plugin.py,sha256=1_XNSrZS-czuaNVt4qvRQs-GbIIl8DaLykGpoDlZfhU,11187
|
||||||
|
aiohttp/resolver.py,sha256=mQvusmMHpS0JekvnX7R1y4aqQ7BIIv3FIkxO5wgv2xQ,3738
|
||||||
|
aiohttp/signals.py,sha256=I_QAX1S7VbN7KDnNO6CSnAzhzx42AYh2Dto_FC9DQ3k,982
|
||||||
|
aiohttp/signals.pyi,sha256=pg4KElFcxBNFU-OQpTe2x-7qKJ79bAlemgqE-yaciiU,341
|
||||||
|
aiohttp/streams.py,sha256=EPM7T5_aJLOXlBTIEeFapIQ1O33KsHTvT-wWH3X0QvQ,21093
|
||||||
|
aiohttp/tcp_helpers.py,sha256=q9fHztjKbR57sCc4zWoo89QDW88pLT0OpcdHLGcV3Fo,1694
|
||||||
|
aiohttp/test_utils.py,sha256=_GjrPdE_9v0SxzbM4Tmt8vst-KJPwL2ILM_Rl1jHhi4,21530
|
||||||
|
aiohttp/tracing.py,sha256=GGhlQDrx5AVwFt33Zl4DvBIoFcR7sXAsgXNxvkd2Uus,13740
|
||||||
|
aiohttp/typedefs.py,sha256=o4R9uAySHxTzedIfX3UPbD0a5TnD5inc_M-h_4qyC4U,1377
|
||||||
|
aiohttp/web.py,sha256=KQXp0C__KpeX8nYM3FWl-eoMAmj9LZIbx7YeI39pQco,19940
|
||||||
|
aiohttp/web_app.py,sha256=dHOhoDoakwdrya0cc6Jl6K723MKGmd_M5LxH3wDeGQI,17779
|
||||||
|
aiohttp/web_exceptions.py,sha256=CQvslnHcpFnreO-qNjnKOWQev7ZvlTG6jfV14NQwb1Q,10519
|
||||||
|
aiohttp/web_fileresponse.py,sha256=TftBNfbgowCQ0L5Iud-dewCAnXq5tIyP-8iZ-KrSHw8,13118
|
||||||
|
aiohttp/web_log.py,sha256=gOR8iLbhjeAUwGL-21qD31kA0HlYSNhpdX6eNwJ-3Uo,8490
|
||||||
|
aiohttp/web_middlewares.py,sha256=jATe_igeeoyBoWKBDW_ISOOzFKvxSoLJE1QPTqZPWGc,4310
|
||||||
|
aiohttp/web_protocol.py,sha256=Zol5oVApIE12NDLBV_W1oKW8AN-sGdBfC0RFMI050U0,22791
|
||||||
|
aiohttp/web_request.py,sha256=xzvj84uGe5Uuug1b4iKWZl8uko_0TpzYKa00POke_NM,26526
|
||||||
|
aiohttp/web_response.py,sha256=CEx04E7NLNg6mfgTjT0QPS9vJuglbw3UQvwob6Qeb7c,26202
|
||||||
|
aiohttp/web_routedef.py,sha256=5QCl85zQml2qoj7bkC9XMoK4stBVuUoiq_0uefxifjc,6293
|
||||||
|
aiohttp/web_runner.py,sha256=ArW4NjMJ24Fv68Ez-9hPL1WNzVygDYEWJ4aIfzOMKz8,11479
|
||||||
|
aiohttp/web_server.py,sha256=P826xDCDs4VgeksMam8OHKm_VzprXuOpsJrysqj3CVg,2222
|
||||||
|
aiohttp/web_urldispatcher.py,sha256=8uhNNXlHd2WJfJ4wcyQ1UxoRM1VUyWWwQhK-TPrM_GM,40043
|
||||||
|
aiohttp/web_ws.py,sha256=mAU6Ln3AbMZeXjUZSSA5MmE39hTajJIMxBE0xnq-4Tc,17414
|
||||||
|
aiohttp/worker.py,sha256=yatPZxpUOp9CzDA05Jb2UWi0eo2PgGWlQm4lIFCRCSY,8420
|
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: false
Tag: cp36-cp36m-win_amd64

@ -0,0 +1 @@
aiohttp
@ -0,0 +1,226 @@
|
|||||||
|
__version__ = '3.6.2'
|
||||||
|
|
||||||
|
from typing import Tuple # noqa
|
||||||
|
|
||||||
|
from . import hdrs as hdrs
|
||||||
|
from .client import BaseConnector as BaseConnector
|
||||||
|
from .client import ClientConnectionError as ClientConnectionError
|
||||||
|
from .client import (
|
||||||
|
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
||||||
|
)
|
||||||
|
from .client import ClientConnectorError as ClientConnectorError
|
||||||
|
from .client import ClientConnectorSSLError as ClientConnectorSSLError
|
||||||
|
from .client import ClientError as ClientError
|
||||||
|
from .client import ClientHttpProxyError as ClientHttpProxyError
|
||||||
|
from .client import ClientOSError as ClientOSError
|
||||||
|
from .client import ClientPayloadError as ClientPayloadError
|
||||||
|
from .client import ClientProxyConnectionError as ClientProxyConnectionError
|
||||||
|
from .client import ClientRequest as ClientRequest
|
||||||
|
from .client import ClientResponse as ClientResponse
|
||||||
|
from .client import ClientResponseError as ClientResponseError
|
||||||
|
from .client import ClientSession as ClientSession
|
||||||
|
from .client import ClientSSLError as ClientSSLError
|
||||||
|
from .client import ClientTimeout as ClientTimeout
|
||||||
|
from .client import ClientWebSocketResponse as ClientWebSocketResponse
|
||||||
|
from .client import ContentTypeError as ContentTypeError
|
||||||
|
from .client import Fingerprint as Fingerprint
|
||||||
|
from .client import InvalidURL as InvalidURL
|
||||||
|
from .client import NamedPipeConnector as NamedPipeConnector
|
||||||
|
from .client import RequestInfo as RequestInfo
|
||||||
|
from .client import ServerConnectionError as ServerConnectionError
|
||||||
|
from .client import ServerDisconnectedError as ServerDisconnectedError
|
||||||
|
from .client import ServerFingerprintMismatch as ServerFingerprintMismatch
|
||||||
|
from .client import ServerTimeoutError as ServerTimeoutError
|
||||||
|
from .client import TCPConnector as TCPConnector
|
||||||
|
from .client import TooManyRedirects as TooManyRedirects
|
||||||
|
from .client import UnixConnector as UnixConnector
|
||||||
|
from .client import WSServerHandshakeError as WSServerHandshakeError
|
||||||
|
from .client import request as request
|
||||||
|
from .cookiejar import CookieJar as CookieJar
|
||||||
|
from .cookiejar import DummyCookieJar as DummyCookieJar
|
||||||
|
from .formdata import FormData as FormData
|
||||||
|
from .helpers import BasicAuth as BasicAuth
|
||||||
|
from .helpers import ChainMapProxy as ChainMapProxy
|
||||||
|
from .http import HttpVersion as HttpVersion
|
||||||
|
from .http import HttpVersion10 as HttpVersion10
|
||||||
|
from .http import HttpVersion11 as HttpVersion11
|
||||||
|
from .http import WebSocketError as WebSocketError
|
||||||
|
from .http import WSCloseCode as WSCloseCode
|
||||||
|
from .http import WSMessage as WSMessage
|
||||||
|
from .http import WSMsgType as WSMsgType
|
||||||
|
from .multipart import (
|
||||||
|
BadContentDispositionHeader as BadContentDispositionHeader,
|
||||||
|
)
|
||||||
|
from .multipart import BadContentDispositionParam as BadContentDispositionParam
|
||||||
|
from .multipart import BodyPartReader as BodyPartReader
|
||||||
|
from .multipart import MultipartReader as MultipartReader
|
||||||
|
from .multipart import MultipartWriter as MultipartWriter
|
||||||
|
from .multipart import (
|
||||||
|
content_disposition_filename as content_disposition_filename,
|
||||||
|
)
|
||||||
|
from .multipart import parse_content_disposition as parse_content_disposition
|
||||||
|
from .payload import PAYLOAD_REGISTRY as PAYLOAD_REGISTRY
|
||||||
|
from .payload import AsyncIterablePayload as AsyncIterablePayload
|
||||||
|
from .payload import BufferedReaderPayload as BufferedReaderPayload
|
||||||
|
from .payload import BytesIOPayload as BytesIOPayload
|
||||||
|
from .payload import BytesPayload as BytesPayload
|
||||||
|
from .payload import IOBasePayload as IOBasePayload
|
||||||
|
from .payload import JsonPayload as JsonPayload
|
||||||
|
from .payload import Payload as Payload
|
||||||
|
from .payload import StringIOPayload as StringIOPayload
|
||||||
|
from .payload import StringPayload as StringPayload
|
||||||
|
from .payload import TextIOPayload as TextIOPayload
|
||||||
|
from .payload import get_payload as get_payload
|
||||||
|
from .payload import payload_type as payload_type
|
||||||
|
from .payload_streamer import streamer as streamer
|
||||||
|
from .resolver import AsyncResolver as AsyncResolver
|
||||||
|
from .resolver import DefaultResolver as DefaultResolver
|
||||||
|
from .resolver import ThreadedResolver as ThreadedResolver
|
||||||
|
from .signals import Signal as Signal
|
||||||
|
from .streams import EMPTY_PAYLOAD as EMPTY_PAYLOAD
|
||||||
|
from .streams import DataQueue as DataQueue
|
||||||
|
from .streams import EofStream as EofStream
|
||||||
|
from .streams import FlowControlDataQueue as FlowControlDataQueue
|
||||||
|
from .streams import StreamReader as StreamReader
|
||||||
|
from .tracing import TraceConfig as TraceConfig
|
||||||
|
from .tracing import (
|
||||||
|
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
||||||
|
)
|
||||||
|
from .tracing import TraceDnsCacheHitParams as TraceDnsCacheHitParams
|
||||||
|
from .tracing import TraceDnsCacheMissParams as TraceDnsCacheMissParams
|
||||||
|
from .tracing import (
|
||||||
|
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
||||||
|
)
|
||||||
|
from .tracing import TraceRequestChunkSentParams as TraceRequestChunkSentParams
|
||||||
|
from .tracing import TraceRequestEndParams as TraceRequestEndParams
|
||||||
|
from .tracing import TraceRequestExceptionParams as TraceRequestExceptionParams
|
||||||
|
from .tracing import TraceRequestRedirectParams as TraceRequestRedirectParams
|
||||||
|
from .tracing import TraceRequestStartParams as TraceRequestStartParams
|
||||||
|
from .tracing import (
|
||||||
|
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'hdrs',
|
||||||
|
# client
|
||||||
|
'BaseConnector',
|
||||||
|
'ClientConnectionError',
|
||||||
|
'ClientConnectorCertificateError',
|
||||||
|
'ClientConnectorError',
|
||||||
|
'ClientConnectorSSLError',
|
||||||
|
'ClientError',
|
||||||
|
'ClientHttpProxyError',
|
||||||
|
'ClientOSError',
|
||||||
|
'ClientPayloadError',
|
||||||
|
'ClientProxyConnectionError',
|
||||||
|
'ClientResponse',
|
||||||
|
'ClientRequest',
|
||||||
|
'ClientResponseError',
|
||||||
|
'ClientSSLError',
|
||||||
|
'ClientSession',
|
||||||
|
'ClientTimeout',
|
||||||
|
'ClientWebSocketResponse',
|
||||||
|
'ContentTypeError',
|
||||||
|
'Fingerprint',
|
||||||
|
'InvalidURL',
|
||||||
|
'RequestInfo',
|
||||||
|
'ServerConnectionError',
|
||||||
|
'ServerDisconnectedError',
|
||||||
|
'ServerFingerprintMismatch',
|
||||||
|
'ServerTimeoutError',
|
||||||
|
'TCPConnector',
|
||||||
|
'TooManyRedirects',
|
||||||
|
'UnixConnector',
|
||||||
|
'NamedPipeConnector',
|
||||||
|
'WSServerHandshakeError',
|
||||||
|
'request',
|
||||||
|
# cookiejar
|
||||||
|
'CookieJar',
|
||||||
|
'DummyCookieJar',
|
||||||
|
# formdata
|
||||||
|
'FormData',
|
||||||
|
# helpers
|
||||||
|
'BasicAuth',
|
||||||
|
'ChainMapProxy',
|
||||||
|
# http
|
||||||
|
'HttpVersion',
|
||||||
|
'HttpVersion10',
|
||||||
|
'HttpVersion11',
|
||||||
|
'WSMsgType',
|
||||||
|
'WSCloseCode',
|
||||||
|
'WSMessage',
|
||||||
|
'WebSocketError',
|
||||||
|
# multipart
|
||||||
|
'BadContentDispositionHeader',
|
||||||
|
'BadContentDispositionParam',
|
||||||
|
'BodyPartReader',
|
||||||
|
'MultipartReader',
|
||||||
|
'MultipartWriter',
|
||||||
|
'content_disposition_filename',
|
||||||
|
'parse_content_disposition',
|
||||||
|
# payload
|
||||||
|
'AsyncIterablePayload',
|
||||||
|
'BufferedReaderPayload',
|
||||||
|
'BytesIOPayload',
|
||||||
|
'BytesPayload',
|
||||||
|
'IOBasePayload',
|
||||||
|
'JsonPayload',
|
||||||
|
'PAYLOAD_REGISTRY',
|
||||||
|
'Payload',
|
||||||
|
'StringIOPayload',
|
||||||
|
'StringPayload',
|
||||||
|
'TextIOPayload',
|
||||||
|
'get_payload',
|
||||||
|
'payload_type',
|
||||||
|
# payload_streamer
|
||||||
|
'streamer',
|
||||||
|
# resolver
|
||||||
|
'AsyncResolver',
|
||||||
|
'DefaultResolver',
|
||||||
|
'ThreadedResolver',
|
||||||
|
# signals
|
||||||
|
'Signal',
|
||||||
|
'DataQueue',
|
||||||
|
'EMPTY_PAYLOAD',
|
||||||
|
'EofStream',
|
||||||
|
'FlowControlDataQueue',
|
||||||
|
'StreamReader',
|
||||||
|
# tracing
|
||||||
|
'TraceConfig',
|
||||||
|
'TraceConnectionCreateEndParams',
|
||||||
|
'TraceConnectionCreateStartParams',
|
||||||
|
'TraceConnectionQueuedEndParams',
|
||||||
|
'TraceConnectionQueuedStartParams',
|
||||||
|
'TraceConnectionReuseconnParams',
|
||||||
|
'TraceDnsCacheHitParams',
|
||||||
|
'TraceDnsCacheMissParams',
|
||||||
|
'TraceDnsResolveHostEndParams',
|
||||||
|
'TraceDnsResolveHostStartParams',
|
||||||
|
'TraceRequestChunkSentParams',
|
||||||
|
'TraceRequestEndParams',
|
||||||
|
'TraceRequestExceptionParams',
|
||||||
|
'TraceRequestRedirectParams',
|
||||||
|
'TraceRequestStartParams',
|
||||||
|
'TraceResponseChunkReceivedParams',
|
||||||
|
) # type: Tuple[str, ...]
|
||||||
|
|
||||||
|
try:
|
||||||
|
from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa
|
||||||
|
__all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
pass
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,140 @@
|
|||||||
|
from libc.stdint cimport uint16_t, uint32_t, uint64_t
|
||||||
|
|
||||||
|
|
||||||
|
cdef extern from "../vendor/http-parser/http_parser.h":
|
||||||
|
ctypedef int (*http_data_cb) (http_parser*,
|
||||||
|
const char *at,
|
||||||
|
size_t length) except -1
|
||||||
|
|
||||||
|
ctypedef int (*http_cb) (http_parser*) except -1
|
||||||
|
|
||||||
|
struct http_parser:
|
||||||
|
unsigned int type
|
||||||
|
unsigned int flags
|
||||||
|
unsigned int state
|
||||||
|
unsigned int header_state
|
||||||
|
unsigned int index
|
||||||
|
|
||||||
|
uint32_t nread
|
||||||
|
uint64_t content_length
|
||||||
|
|
||||||
|
unsigned short http_major
|
||||||
|
unsigned short http_minor
|
||||||
|
unsigned int status_code
|
||||||
|
unsigned int method
|
||||||
|
unsigned int http_errno
|
||||||
|
|
||||||
|
unsigned int upgrade
|
||||||
|
|
||||||
|
void *data
|
||||||
|
|
||||||
|
struct http_parser_settings:
|
||||||
|
http_cb on_message_begin
|
||||||
|
http_data_cb on_url
|
||||||
|
http_data_cb on_status
|
||||||
|
http_data_cb on_header_field
|
||||||
|
http_data_cb on_header_value
|
||||||
|
http_cb on_headers_complete
|
||||||
|
http_data_cb on_body
|
||||||
|
http_cb on_message_complete
|
||||||
|
http_cb on_chunk_header
|
||||||
|
http_cb on_chunk_complete
|
||||||
|
|
||||||
|
enum http_parser_type:
|
||||||
|
HTTP_REQUEST,
|
||||||
|
HTTP_RESPONSE,
|
||||||
|
HTTP_BOTH
|
||||||
|
|
||||||
|
enum http_errno:
|
||||||
|
HPE_OK,
|
||||||
|
HPE_CB_message_begin,
|
||||||
|
HPE_CB_url,
|
||||||
|
HPE_CB_header_field,
|
||||||
|
HPE_CB_header_value,
|
||||||
|
HPE_CB_headers_complete,
|
||||||
|
HPE_CB_body,
|
||||||
|
HPE_CB_message_complete,
|
||||||
|
HPE_CB_status,
|
||||||
|
HPE_CB_chunk_header,
|
||||||
|
HPE_CB_chunk_complete,
|
||||||
|
HPE_INVALID_EOF_STATE,
|
||||||
|
HPE_HEADER_OVERFLOW,
|
||||||
|
HPE_CLOSED_CONNECTION,
|
||||||
|
HPE_INVALID_VERSION,
|
||||||
|
HPE_INVALID_STATUS,
|
||||||
|
HPE_INVALID_METHOD,
|
||||||
|
HPE_INVALID_URL,
|
||||||
|
HPE_INVALID_HOST,
|
||||||
|
HPE_INVALID_PORT,
|
||||||
|
HPE_INVALID_PATH,
|
||||||
|
HPE_INVALID_QUERY_STRING,
|
||||||
|
HPE_INVALID_FRAGMENT,
|
||||||
|
HPE_LF_EXPECTED,
|
||||||
|
HPE_INVALID_HEADER_TOKEN,
|
||||||
|
HPE_INVALID_CONTENT_LENGTH,
|
||||||
|
HPE_INVALID_CHUNK_SIZE,
|
||||||
|
HPE_INVALID_CONSTANT,
|
||||||
|
HPE_INVALID_INTERNAL_STATE,
|
||||||
|
HPE_STRICT,
|
||||||
|
HPE_PAUSED,
|
||||||
|
HPE_UNKNOWN
|
||||||
|
|
||||||
|
enum flags:
|
||||||
|
F_CHUNKED,
|
||||||
|
F_CONNECTION_KEEP_ALIVE,
|
||||||
|
F_CONNECTION_CLOSE,
|
||||||
|
F_CONNECTION_UPGRADE,
|
||||||
|
F_TRAILING,
|
||||||
|
F_UPGRADE,
|
||||||
|
F_SKIPBODY,
|
||||||
|
F_CONTENTLENGTH
|
||||||
|
|
||||||
|
enum http_method:
|
||||||
|
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
|
||||||
|
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
|
||||||
|
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
|
||||||
|
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
|
||||||
|
LINK, UNLINK
|
||||||
|
|
||||||
|
void http_parser_init(http_parser *parser, http_parser_type type)
|
||||||
|
|
||||||
|
size_t http_parser_execute(http_parser *parser,
|
||||||
|
const http_parser_settings *settings,
|
||||||
|
const char *data,
|
||||||
|
size_t len)
|
||||||
|
|
||||||
|
int http_should_keep_alive(const http_parser *parser)
|
||||||
|
|
||||||
|
void http_parser_settings_init(http_parser_settings *settings)
|
||||||
|
|
||||||
|
const char *http_errno_name(http_errno err)
|
||||||
|
const char *http_errno_description(http_errno err)
|
||||||
|
const char *http_method_str(http_method m)
|
||||||
|
|
||||||
|
# URL Parser
|
||||||
|
|
||||||
|
enum http_parser_url_fields:
|
||||||
|
UF_SCHEMA = 0,
|
||||||
|
UF_HOST = 1,
|
||||||
|
UF_PORT = 2,
|
||||||
|
UF_PATH = 3,
|
||||||
|
UF_QUERY = 4,
|
||||||
|
UF_FRAGMENT = 5,
|
||||||
|
UF_USERINFO = 6,
|
||||||
|
UF_MAX = 7
|
||||||
|
|
||||||
|
struct http_parser_url_field_data:
|
||||||
|
uint16_t off
|
||||||
|
uint16_t len
|
||||||
|
|
||||||
|
struct http_parser_url:
|
||||||
|
uint16_t field_set
|
||||||
|
uint16_t port
|
||||||
|
http_parser_url_field_data[<int>UF_MAX] field_data
|
||||||
|
|
||||||
|
void http_parser_url_init(http_parser_url *u)
|
||||||
|
|
||||||
|
int http_parser_parse_url(const char *buf,
|
||||||
|
size_t buflen,
|
||||||
|
int is_connect,
|
||||||
|
http_parser_url *u)
|
File diff suppressed because it is too large
@ -0,0 +1,14 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H

#ifdef __cplusplus
extern "C" {
#endif

int find_header(const char *str, int size);


#ifdef __cplusplus
}
#endif
#endif
@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)
File diff suppressed because it is too large
Binary file not shown.
@ -0,0 +1,108 @@
from collections.abc import MutableSequence


cdef class FrozenList:

    cdef readonly bint frozen
    cdef list _items

    def __init__(self, items=None):
        self.frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    cdef object _check_frozen(self):
        if self.frozen:
            raise RuntimeError("Cannot modify frozen list.")

    cdef inline object _fast_len(self):
        return len(self._items)

    def freeze(self):
        self.frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_frozen()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_frozen()
        del self._items[index]

    def __len__(self):
        return self._fast_len()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __richcmp__(self, other, op):
        if op == 0:  # <
            return list(self) < other
        if op == 1:  # <=
            return list(self) <= other
        if op == 2:  # ==
            return list(self) == other
        if op == 3:  # !=
            return list(self) != other
        if op == 4:  # >
            return list(self) > other
        if op == 5:  # >=
            return list(self) >= other

    def insert(self, pos, item):
        self._check_frozen()
        self._items.insert(pos, item)

    def __contains__(self, item):
        return item in self._items

    def __iadd__(self, items):
        self._check_frozen()
        self._items += list(items)
        return self

    def index(self, item):
        return self._items.index(item)

    def remove(self, item):
        self._check_frozen()
        self._items.remove(item)

    def clear(self):
        self._check_frozen()
        self._items.clear()

    def extend(self, items):
        self._check_frozen()
        self._items += list(items)

    def reverse(self):
        self._check_frozen()
        self._items.reverse()

    def pop(self, index=-1):
        self._check_frozen()
        return self._items.pop(index)

    def append(self, item):
        self._check_frozen()
        return self._items.append(item)

    def count(self, item):
        return self._items.count(item)

    def __repr__(self):
        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
                                                      self._items)


MutableSequence.register(FrozenList)
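A quick usage sketch of the FrozenList behaviour above, using the pure-Python
fallback that aiohttp ships alongside this Cython version (aiohttp/frozenlist.py,
listed in the RECORD earlier); the callback values are only illustrative:

    from aiohttp.frozenlist import FrozenList

    callbacks = FrozenList()
    callbacks.append(print)      # mutation is allowed while not frozen
    callbacks.freeze()
    try:
        callbacks.append(print)  # any mutation after freeze() raises
    except RuntimeError as exc:
        print(exc)               # "Cannot modify frozen list."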
@ -0,0 +1,84 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changing.

from . import hdrs
cdef tuple headers = (
    hdrs.ACCEPT,
    hdrs.ACCEPT_CHARSET,
    hdrs.ACCEPT_ENCODING,
    hdrs.ACCEPT_LANGUAGE,
    hdrs.ACCEPT_RANGES,
    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
    hdrs.ACCESS_CONTROL_MAX_AGE,
    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
    hdrs.AGE,
    hdrs.ALLOW,
    hdrs.AUTHORIZATION,
    hdrs.CACHE_CONTROL,
    hdrs.CONNECTION,
    hdrs.CONTENT_DISPOSITION,
    hdrs.CONTENT_ENCODING,
    hdrs.CONTENT_LANGUAGE,
    hdrs.CONTENT_LENGTH,
    hdrs.CONTENT_LOCATION,
    hdrs.CONTENT_MD5,
    hdrs.CONTENT_RANGE,
    hdrs.CONTENT_TRANSFER_ENCODING,
    hdrs.CONTENT_TYPE,
    hdrs.COOKIE,
    hdrs.DATE,
    hdrs.DESTINATION,
    hdrs.DIGEST,
    hdrs.ETAG,
    hdrs.EXPECT,
    hdrs.EXPIRES,
    hdrs.FORWARDED,
    hdrs.FROM,
    hdrs.HOST,
    hdrs.IF_MATCH,
    hdrs.IF_MODIFIED_SINCE,
    hdrs.IF_NONE_MATCH,
    hdrs.IF_RANGE,
    hdrs.IF_UNMODIFIED_SINCE,
    hdrs.KEEP_ALIVE,
    hdrs.LAST_EVENT_ID,
    hdrs.LAST_MODIFIED,
    hdrs.LINK,
    hdrs.LOCATION,
    hdrs.MAX_FORWARDS,
    hdrs.ORIGIN,
    hdrs.PRAGMA,
    hdrs.PROXY_AUTHENTICATE,
    hdrs.PROXY_AUTHORIZATION,
    hdrs.RANGE,
    hdrs.REFERER,
    hdrs.RETRY_AFTER,
    hdrs.SEC_WEBSOCKET_ACCEPT,
    hdrs.SEC_WEBSOCKET_EXTENSIONS,
    hdrs.SEC_WEBSOCKET_KEY,
    hdrs.SEC_WEBSOCKET_KEY1,
    hdrs.SEC_WEBSOCKET_PROTOCOL,
    hdrs.SEC_WEBSOCKET_VERSION,
    hdrs.SERVER,
    hdrs.SET_COOKIE,
    hdrs.TE,
    hdrs.TRAILER,
    hdrs.TRANSFER_ENCODING,
    hdrs.UPGRADE,
    hdrs.URI,
    hdrs.USER_AGENT,
    hdrs.VARY,
    hdrs.VIA,
    hdrs.WANT_DIGEST,
    hdrs.WARNING,
    hdrs.WEBSOCKET,
    hdrs.WWW_AUTHENTICATE,
    hdrs.X_FORWARDED_FOR,
    hdrs.X_FORWARDED_HOST,
    hdrs.X_FORWARDED_PROTO,
)
File diff suppressed because it is too large
Binary file not shown.
@ -0,0 +1,8 @@
from typing import Any

class reify:
    def __init__(self, wrapped: Any) -> None: ...

    def __get__(self, inst: Any, owner: Any) -> Any: ...

    def __set__(self, inst: Any, value: Any) -> None: ...
@ -0,0 +1,35 @@
cdef class reify:
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
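A simplified pure-Python sketch of the reify pattern described in the docstring above.
It caches into the instance __dict__ for brevity, whereas the real class above caches
into inst._cache; the Request/headers names are only illustrative:

    class reify:
        def __init__(self, wrapped):
            self.wrapped = wrapped
            self.name = wrapped.__name__

        def __get__(self, inst, owner):
            if inst is None:
                return self
            val = self.wrapped(inst)
            inst.__dict__[self.name] = val  # later lookups hit the cached attribute
            return val

    class Request:
        @reify
        def headers(self):
            print("computed once")
            return {"Host": "example.com"}

    r = Request()
    r.headers  # prints "computed once" and caches the result
    r.headers  # served from the cache, no recomputation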
File diff suppressed because it is too large
Binary file not shown.
@ -0,0 +1,846 @@
|
|||||||
|
#cython: language_level=3
|
||||||
|
#
|
||||||
|
# Based on https://github.com/MagicStack/httptools
|
||||||
|
#
|
||||||
|
from __future__ import absolute_import, print_function
|
||||||
|
from cpython.mem cimport PyMem_Malloc, PyMem_Free
|
||||||
|
from libc.string cimport memcpy
|
||||||
|
from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE,
|
||||||
|
Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize)
|
||||||
|
|
||||||
|
from multidict import (CIMultiDict as _CIMultiDict,
|
||||||
|
CIMultiDictProxy as _CIMultiDictProxy)
|
||||||
|
from yarl import URL as _URL
|
||||||
|
|
||||||
|
from aiohttp import hdrs
|
||||||
|
from .http_exceptions import (
|
||||||
|
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError,
|
||||||
|
PayloadEncodingError, ContentLengthError, TransferEncodingError)
|
||||||
|
from .http_writer import (HttpVersion as _HttpVersion,
|
||||||
|
HttpVersion10 as _HttpVersion10,
|
||||||
|
HttpVersion11 as _HttpVersion11)
|
||||||
|
from .http_parser import DeflateBuffer as _DeflateBuffer
|
||||||
|
from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD,
|
||||||
|
StreamReader as _StreamReader)
|
||||||
|
|
||||||
|
cimport cython
|
||||||
|
from aiohttp cimport _cparser as cparser
|
||||||
|
|
||||||
|
include "_headers.pxi"
|
||||||
|
|
||||||
|
from aiohttp cimport _find_header
|
||||||
|
|
||||||
|
DEF DEFAULT_FREELIST_SIZE = 250
|
||||||
|
|
||||||
|
cdef extern from "Python.h":
|
||||||
|
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
||||||
|
Py_ssize_t PyByteArray_Size(object) except -1
|
||||||
|
char* PyByteArray_AsString(object)
|
||||||
|
|
||||||
|
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
||||||
|
'RawRequestMessage', 'RawResponseMessage')
|
||||||
|
|
||||||
|
cdef object URL = _URL
|
||||||
|
cdef object URL_build = URL.build
|
||||||
|
cdef object CIMultiDict = _CIMultiDict
|
||||||
|
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
||||||
|
cdef object HttpVersion = _HttpVersion
|
||||||
|
cdef object HttpVersion10 = _HttpVersion10
|
||||||
|
cdef object HttpVersion11 = _HttpVersion11
|
||||||
|
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
||||||
|
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
||||||
|
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
||||||
|
cdef object StreamReader = _StreamReader
|
||||||
|
cdef object DeflateBuffer = _DeflateBuffer
|
||||||
|
|
||||||
|
|
||||||
|
cdef inline object extend(object buf, const char* at, size_t length):
|
||||||
|
cdef Py_ssize_t s
|
||||||
|
cdef char* ptr
|
||||||
|
s = PyByteArray_Size(buf)
|
||||||
|
PyByteArray_Resize(buf, s + length)
|
||||||
|
ptr = PyByteArray_AsString(buf)
|
||||||
|
memcpy(ptr + s, at, length)
|
||||||
|
|
||||||
|
|
||||||
|
DEF METHODS_COUNT = 34;
|
||||||
|
|
||||||
|
cdef list _http_method = []
|
||||||
|
|
||||||
|
for i in range(METHODS_COUNT):
|
||||||
|
_http_method.append(
|
||||||
|
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
|
||||||
|
|
||||||
|
|
||||||
|
cdef inline str http_method_str(int i):
|
||||||
|
if i < METHODS_COUNT:
|
||||||
|
return <str>_http_method[i]
|
||||||
|
else:
|
||||||
|
return "<unknown>"
|
||||||
|
|
||||||
|
cdef inline object find_header(bytes raw_header):
|
||||||
|
cdef Py_ssize_t size
|
||||||
|
cdef char *buf
|
||||||
|
cdef int idx
|
||||||
|
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
||||||
|
idx = _find_header.find_header(buf, size)
|
||||||
|
if idx == -1:
|
||||||
|
return raw_header.decode('utf-8', 'surrogateescape')
|
||||||
|
return headers[idx]
|
||||||
|
|
||||||
|
|
||||||
|
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
||||||
|
cdef class RawRequestMessage:
|
||||||
|
cdef readonly str method
|
||||||
|
cdef readonly str path
|
||||||
|
cdef readonly object version # HttpVersion
|
||||||
|
cdef readonly object headers # CIMultiDict
|
||||||
|
cdef readonly object raw_headers # tuple
|
||||||
|
cdef readonly object should_close
|
||||||
|
cdef readonly object compression
|
||||||
|
cdef readonly object upgrade
|
||||||
|
cdef readonly object chunked
|
||||||
|
cdef readonly object url # yarl.URL
|
||||||
|
|
||||||
|
def __init__(self, method, path, version, headers, raw_headers,
|
||||||
|
should_close, compression, upgrade, chunked, url):
|
||||||
|
self.method = method
|
||||||
|
self.path = path
|
||||||
|
self.version = version
|
||||||
|
self.headers = headers
|
||||||
|
self.raw_headers = raw_headers
|
||||||
|
self.should_close = should_close
|
||||||
|
self.compression = compression
|
||||||
|
self.upgrade = upgrade
|
||||||
|
self.chunked = chunked
|
||||||
|
self.url = url
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
info = []
|
||||||
|
info.append(("method", self.method))
|
||||||
|
info.append(("path", self.path))
|
||||||
|
info.append(("version", self.version))
|
||||||
|
info.append(("headers", self.headers))
|
||||||
|
info.append(("raw_headers", self.raw_headers))
|
||||||
|
info.append(("should_close", self.should_close))
|
||||||
|
info.append(("compression", self.compression))
|
||||||
|
info.append(("upgrade", self.upgrade))
|
||||||
|
info.append(("chunked", self.chunked))
|
||||||
|
info.append(("url", self.url))
|
||||||
|
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
||||||
|
return '<RawRequestMessage(' + sinfo + ')>'
|
||||||
|
|
||||||
|
def _replace(self, **dct):
|
||||||
|
cdef RawRequestMessage ret
|
||||||
|
ret = _new_request_message(self.method,
|
||||||
|
self.path,
|
||||||
|
self.version,
|
||||||
|
self.headers,
|
||||||
|
self.raw_headers,
|
||||||
|
self.should_close,
|
||||||
|
self.compression,
|
||||||
|
self.upgrade,
|
||||||
|
self.chunked,
|
||||||
|
self.url)
|
||||||
|
if "method" in dct:
|
||||||
|
ret.method = dct["method"]
|
||||||
|
if "path" in dct:
|
||||||
|
ret.path = dct["path"]
|
||||||
|
if "version" in dct:
|
||||||
|
ret.version = dct["version"]
|
||||||
|
if "headers" in dct:
|
||||||
|
ret.headers = dct["headers"]
|
||||||
|
if "raw_headers" in dct:
|
||||||
|
ret.raw_headers = dct["raw_headers"]
|
||||||
|
if "should_close" in dct:
|
||||||
|
ret.should_close = dct["should_close"]
|
||||||
|
if "compression" in dct:
|
||||||
|
ret.compression = dct["compression"]
|
||||||
|
if "upgrade" in dct:
|
||||||
|
ret.upgrade = dct["upgrade"]
|
||||||
|
if "chunked" in dct:
|
||||||
|
ret.chunked = dct["chunked"]
|
||||||
|
if "url" in dct:
|
||||||
|
ret.url = dct["url"]
|
||||||
|
return ret
|
||||||
|
|
||||||
|
cdef _new_request_message(str method,
|
||||||
|
str path,
|
||||||
|
object version,
|
||||||
|
object headers,
|
||||||
|
object raw_headers,
|
||||||
|
bint should_close,
|
||||||
|
object compression,
|
||||||
|
bint upgrade,
|
||||||
|
bint chunked,
|
||||||
|
object url):
|
||||||
|
cdef RawRequestMessage ret
|
||||||
|
ret = RawRequestMessage.__new__(RawRequestMessage)
|
||||||
|
ret.method = method
|
||||||
|
ret.path = path
|
||||||
|
ret.version = version
|
||||||
|
ret.headers = headers
|
||||||
|
ret.raw_headers = raw_headers
|
||||||
|
ret.should_close = should_close
|
||||||
|
ret.compression = compression
|
||||||
|
ret.upgrade = upgrade
|
||||||
|
ret.chunked = chunked
|
||||||
|
ret.url = url
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
||||||
|
cdef class RawResponseMessage:
|
||||||
|
cdef readonly object version # HttpVersion
|
||||||
|
cdef readonly int code
|
||||||
|
cdef readonly str reason
|
||||||
|
cdef readonly object headers # CIMultiDict
|
||||||
|
cdef readonly object raw_headers # tuple
|
||||||
|
cdef readonly object should_close
|
||||||
|
cdef readonly object compression
|
||||||
|
cdef readonly object upgrade
|
||||||
|
cdef readonly object chunked
|
||||||
|
|
||||||
|
def __init__(self, version, code, reason, headers, raw_headers,
|
||||||
|
should_close, compression, upgrade, chunked):
|
||||||
|
self.version = version
|
||||||
|
self.code = code
|
||||||
|
self.reason = reason
|
||||||
|
self.headers = headers
|
||||||
|
self.raw_headers = raw_headers
|
||||||
|
self.should_close = should_close
|
||||||
|
self.compression = compression
|
||||||
|
self.upgrade = upgrade
|
||||||
|
self.chunked = chunked
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
info = []
|
||||||
|
info.append(("version", self.version))
|
||||||
|
info.append(("code", self.code))
|
||||||
|
info.append(("reason", self.reason))
|
||||||
|
info.append(("headers", self.headers))
|
||||||
|
info.append(("raw_headers", self.raw_headers))
|
||||||
|
info.append(("should_close", self.should_close))
|
||||||
|
info.append(("compression", self.compression))
|
||||||
|
info.append(("upgrade", self.upgrade))
|
||||||
|
info.append(("chunked", self.chunked))
|
||||||
|
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
||||||
|
return '<RawResponseMessage(' + sinfo + ')>'
|
||||||
|
|
||||||
|
|
||||||
|
cdef _new_response_message(object version,
|
||||||
|
int code,
|
||||||
|
str reason,
|
||||||
|
object headers,
|
||||||
|
object raw_headers,
|
||||||
|
bint should_close,
|
||||||
|
object compression,
|
||||||
|
bint upgrade,
|
||||||
|
bint chunked):
|
||||||
|
cdef RawResponseMessage ret
|
||||||
|
ret = RawResponseMessage.__new__(RawResponseMessage)
|
||||||
|
ret.version = version
|
||||||
|
ret.code = code
|
||||||
|
ret.reason = reason
|
||||||
|
ret.headers = headers
|
||||||
|
ret.raw_headers = raw_headers
|
||||||
|
ret.should_close = should_close
|
||||||
|
ret.compression = compression
|
||||||
|
ret.upgrade = upgrade
|
||||||
|
ret.chunked = chunked
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
@cython.internal
|
||||||
|
cdef class HttpParser:
|
||||||
|
|
||||||
|
cdef:
|
||||||
|
cparser.http_parser* _cparser
|
||||||
|
cparser.http_parser_settings* _csettings
|
||||||
|
|
||||||
|
bytearray _raw_name
|
||||||
|
bytearray _raw_value
|
||||||
|
bint _has_value
|
||||||
|
|
||||||
|
object _protocol
|
||||||
|
object _loop
|
||||||
|
object _timer
|
||||||
|
|
||||||
|
size_t _max_line_size
|
||||||
|
size_t _max_field_size
|
||||||
|
size_t _max_headers
|
||||||
|
bint _response_with_body
|
||||||
|
|
||||||
|
bint _started
|
||||||
|
object _url
|
||||||
|
bytearray _buf
|
||||||
|
str _path
|
||||||
|
str _reason
|
||||||
|
object _headers
|
||||||
|
list _raw_headers
|
||||||
|
bint _upgraded
|
||||||
|
list _messages
|
||||||
|
object _payload
|
||||||
|
bint _payload_error
|
||||||
|
object _payload_exception
|
||||||
|
object _last_error
|
||||||
|
bint _auto_decompress
|
||||||
|
|
||||||
|
str _content_encoding
|
||||||
|
|
||||||
|
Py_buffer py_buf
|
||||||
|
|
||||||
|
def __cinit__(self):
|
||||||
|
self._cparser = <cparser.http_parser*> \
|
||||||
|
PyMem_Malloc(sizeof(cparser.http_parser))
|
||||||
|
if self._cparser is NULL:
|
||||||
|
raise MemoryError()
|
||||||
|
|
||||||
|
self._csettings = <cparser.http_parser_settings*> \
|
||||||
|
PyMem_Malloc(sizeof(cparser.http_parser_settings))
|
||||||
|
if self._csettings is NULL:
|
||||||
|
raise MemoryError()
|
||||||
|
|
||||||
|
def __dealloc__(self):
|
||||||
|
PyMem_Free(self._cparser)
|
||||||
|
PyMem_Free(self._csettings)
|
||||||
|
|
||||||
|
cdef _init(self, cparser.http_parser_type mode,
|
||||||
|
object protocol, object loop, object timer=None,
|
||||||
|
size_t max_line_size=8190, size_t max_headers=32768,
|
||||||
|
size_t max_field_size=8190, payload_exception=None,
|
||||||
|
bint response_with_body=True, bint auto_decompress=True):
|
||||||
|
cparser.http_parser_init(self._cparser, mode)
|
||||||
|
self._cparser.data = <void*>self
|
||||||
|
self._cparser.content_length = 0
|
||||||
|
|
||||||
|
cparser.http_parser_settings_init(self._csettings)
|
||||||
|
|
||||||
|
self._protocol = protocol
|
||||||
|
self._loop = loop
|
||||||
|
self._timer = timer
|
||||||
|
|
||||||
|
self._buf = bytearray()
|
||||||
|
self._payload = None
|
||||||
|
self._payload_error = 0
|
||||||
|
self._payload_exception = payload_exception
|
||||||
|
self._messages = []
|
||||||
|
|
||||||
|
self._raw_name = bytearray()
|
||||||
|
self._raw_value = bytearray()
|
||||||
|
self._has_value = False
|
||||||
|
|
||||||
|
self._max_line_size = max_line_size
|
||||||
|
self._max_headers = max_headers
|
||||||
|
self._max_field_size = max_field_size
|
||||||
|
self._response_with_body = response_with_body
|
||||||
|
self._upgraded = False
|
||||||
|
self._auto_decompress = auto_decompress
|
||||||
|
self._content_encoding = None
|
||||||
|
|
||||||
|
self._csettings.on_url = cb_on_url
|
||||||
|
self._csettings.on_status = cb_on_status
|
||||||
|
self._csettings.on_header_field = cb_on_header_field
|
||||||
|
self._csettings.on_header_value = cb_on_header_value
|
||||||
|
self._csettings.on_headers_complete = cb_on_headers_complete
|
||||||
|
self._csettings.on_body = cb_on_body
|
||||||
|
self._csettings.on_message_begin = cb_on_message_begin
|
||||||
|
self._csettings.on_message_complete = cb_on_message_complete
|
||||||
|
self._csettings.on_chunk_header = cb_on_chunk_header
|
||||||
|
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
||||||
|
|
||||||
|
self._last_error = None
|
||||||
|
|
||||||
|
cdef _process_header(self):
|
||||||
|
if self._raw_name:
|
||||||
|
raw_name = bytes(self._raw_name)
|
||||||
|
raw_value = bytes(self._raw_value)
|
||||||
|
|
||||||
|
name = find_header(raw_name)
|
||||||
|
value = raw_value.decode('utf-8', 'surrogateescape')
|
||||||
|
|
||||||
|
self._headers.add(name, value)
|
||||||
|
|
||||||
|
if name is CONTENT_ENCODING:
|
||||||
|
self._content_encoding = value
|
||||||
|
|
||||||
|
PyByteArray_Resize(self._raw_name, 0)
|
||||||
|
PyByteArray_Resize(self._raw_value, 0)
|
||||||
|
self._has_value = False
|
||||||
|
self._raw_headers.append((raw_name, raw_value))
|
||||||
|
|
||||||
|
cdef _on_header_field(self, char* at, size_t length):
|
||||||
|
cdef Py_ssize_t size
|
||||||
|
cdef char *buf
|
||||||
|
if self._has_value:
|
||||||
|
self._process_header()
|
||||||
|
|
||||||
|
size = PyByteArray_Size(self._raw_name)
|
||||||
|
PyByteArray_Resize(self._raw_name, size + length)
|
||||||
|
buf = PyByteArray_AsString(self._raw_name)
|
||||||
|
memcpy(buf + size, at, length)
|
||||||
|
|
||||||
|
cdef _on_header_value(self, char* at, size_t length):
|
||||||
|
cdef Py_ssize_t size
|
||||||
|
cdef char *buf
|
||||||
|
|
||||||
|
size = PyByteArray_Size(self._raw_value)
|
||||||
|
PyByteArray_Resize(self._raw_value, size + length)
|
||||||
|
buf = PyByteArray_AsString(self._raw_value)
|
||||||
|
memcpy(buf + size, at, length)
|
||||||
|
self._has_value = True
|
||||||
|
|
||||||
|
cdef _on_headers_complete(self):
|
||||||
|
self._process_header()
|
||||||
|
|
||||||
|
method = http_method_str(self._cparser.method)
|
||||||
|
should_close = not cparser.http_should_keep_alive(self._cparser)
|
||||||
|
upgrade = self._cparser.upgrade
|
||||||
|
chunked = self._cparser.flags & cparser.F_CHUNKED
|
||||||
|
|
||||||
|
raw_headers = tuple(self._raw_headers)
|
||||||
|
headers = CIMultiDictProxy(self._headers)
|
||||||
|
|
||||||
|
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
|
||||||
|
self._upgraded = True
|
||||||
|
|
||||||
|
# do not support old websocket spec
|
||||||
|
if SEC_WEBSOCKET_KEY1 in headers:
|
||||||
|
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
||||||
|
|
||||||
|
encoding = None
|
||||||
|
enc = self._content_encoding
|
||||||
|
if enc is not None:
|
||||||
|
self._content_encoding = None
|
||||||
|
enc = enc.lower()
|
||||||
|
if enc in ('gzip', 'deflate', 'br'):
|
||||||
|
encoding = enc
|
||||||
|
|
||||||
|
if self._cparser.type == cparser.HTTP_REQUEST:
|
||||||
|
msg = _new_request_message(
|
||||||
|
method, self._path,
|
||||||
|
self.http_version(), headers, raw_headers,
|
||||||
|
should_close, encoding, upgrade, chunked, self._url)
|
||||||
|
else:
|
||||||
|
msg = _new_response_message(
|
||||||
|
self.http_version(), self._cparser.status_code, self._reason,
|
||||||
|
headers, raw_headers, should_close, encoding,
|
||||||
|
upgrade, chunked)
|
||||||
|
|
||||||
|
if (self._cparser.content_length > 0 or chunked or
|
||||||
|
self._cparser.method == 5): # CONNECT: 5
|
||||||
|
payload = StreamReader(
|
||||||
|
self._protocol, timer=self._timer, loop=self._loop)
|
||||||
|
else:
|
||||||
|
payload = EMPTY_PAYLOAD
|
||||||
|
|
||||||
|
self._payload = payload
|
||||||
|
if encoding is not None and self._auto_decompress:
|
||||||
|
self._payload = DeflateBuffer(payload, encoding)
|
||||||
|
|
||||||
|
if not self._response_with_body:
|
||||||
|
payload = EMPTY_PAYLOAD
|
||||||
|
|
||||||
|
self._messages.append((msg, payload))
|
||||||
|
|
||||||
|
cdef _on_message_complete(self):
|
||||||
|
self._payload.feed_eof()
|
||||||
|
self._payload = None
|
||||||
|
|
||||||
|
cdef _on_chunk_header(self):
|
||||||
|
self._payload.begin_http_chunk_receiving()
|
||||||
|
|
||||||
|
cdef _on_chunk_complete(self):
|
||||||
|
self._payload.end_http_chunk_receiving()
|
||||||
|
|
||||||
|
cdef object _on_status_complete(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
cdef inline http_version(self):
|
||||||
|
cdef cparser.http_parser* parser = self._cparser
|
||||||
|
|
||||||
|
if parser.http_major == 1:
|
||||||
|
if parser.http_minor == 0:
|
||||||
|
return HttpVersion10
|
||||||
|
elif parser.http_minor == 1:
|
||||||
|
return HttpVersion11
|
||||||
|
|
||||||
|
return HttpVersion(parser.http_major, parser.http_minor)
|
||||||
|
|
||||||
|
### Public API ###
|
||||||
|
|
||||||
|
def feed_eof(self):
|
||||||
|
cdef bytes desc
|
||||||
|
|
||||||
|
if self._payload is not None:
|
||||||
|
if self._cparser.flags & cparser.F_CHUNKED:
|
||||||
|
raise TransferEncodingError(
|
||||||
|
"Not enough data for satisfy transfer length header.")
|
||||||
|
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
|
||||||
|
raise ContentLengthError(
|
||||||
|
"Not enough data for satisfy content length header.")
|
||||||
|
elif self._cparser.http_errno != cparser.HPE_OK:
|
||||||
|
desc = cparser.http_errno_description(
|
||||||
|
<cparser.http_errno> self._cparser.http_errno)
|
||||||
|
raise PayloadEncodingError(desc.decode('latin-1'))
|
||||||
|
else:
|
||||||
|
self._payload.feed_eof()
|
||||||
|
elif self._started:
|
||||||
|
self._on_headers_complete()
|
||||||
|
if self._messages:
|
||||||
|
return self._messages[-1][0]
|
||||||
|
|
||||||
|
def feed_data(self, data):
|
||||||
|
cdef:
|
||||||
|
size_t data_len
|
||||||
|
size_t nb
|
||||||
|
|
||||||
|
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
||||||
|
data_len = <size_t>self.py_buf.len
|
||||||
|
|
||||||
|
nb = cparser.http_parser_execute(
|
||||||
|
self._cparser,
|
||||||
|
self._csettings,
|
||||||
|
<char*>self.py_buf.buf,
|
||||||
|
data_len)
|
||||||
|
|
||||||
|
PyBuffer_Release(&self.py_buf)
|
||||||
|
|
||||||
|
# Not sure about cparser.HPE_INVALID_METHOD: the parser seems to report an
# error for a valid request
# (test_client_functional.py::test_post_data_with_bytesio_file)
|
||||||
|
if (self._cparser.http_errno != cparser.HPE_OK and
|
||||||
|
(self._cparser.http_errno != cparser.HPE_INVALID_METHOD or
|
||||||
|
self._cparser.method == 0)):
|
||||||
|
if self._payload_error == 0:
|
||||||
|
if self._last_error is not None:
|
||||||
|
ex = self._last_error
|
||||||
|
self._last_error = None
|
||||||
|
else:
|
||||||
|
ex = parser_error_from_errno(
|
||||||
|
<cparser.http_errno> self._cparser.http_errno)
|
||||||
|
self._payload = None
|
||||||
|
raise ex
|
||||||
|
|
||||||
|
if self._messages:
|
||||||
|
messages = self._messages
|
||||||
|
self._messages = []
|
||||||
|
else:
|
||||||
|
messages = ()
|
||||||
|
|
||||||
|
if self._upgraded:
|
||||||
|
return messages, True, data[nb:]
|
||||||
|
else:
|
||||||
|
return messages, False, b''
|
||||||
|
|
||||||
|
|
||||||
|
cdef class HttpRequestParser(HttpParser):
|
||||||
|
|
||||||
|
def __init__(self, protocol, loop, timer=None,
|
||||||
|
size_t max_line_size=8190, size_t max_headers=32768,
|
||||||
|
size_t max_field_size=8190, payload_exception=None,
|
||||||
|
bint response_with_body=True, bint read_until_eof=False):
|
||||||
|
self._init(cparser.HTTP_REQUEST, protocol, loop, timer,
|
||||||
|
max_line_size, max_headers, max_field_size,
|
||||||
|
payload_exception, response_with_body)
|
||||||
|
|
||||||
|
cdef object _on_status_complete(self):
|
||||||
|
cdef Py_buffer py_buf
|
||||||
|
if not self._buf:
|
||||||
|
return
|
||||||
|
self._path = self._buf.decode('utf-8', 'surrogateescape')
|
||||||
|
if self._cparser.method == 5: # CONNECT
|
||||||
|
self._url = URL(self._path)
|
||||||
|
else:
|
||||||
|
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
|
||||||
|
try:
|
||||||
|
self._url = _parse_url(<char*>py_buf.buf,
|
||||||
|
py_buf.len)
|
||||||
|
finally:
|
||||||
|
PyBuffer_Release(&py_buf)
|
||||||
|
PyByteArray_Resize(self._buf, 0)
|
||||||
|
|
||||||
|
|
||||||
|
cdef class HttpResponseParser(HttpParser):
|
||||||
|
|
||||||
|
def __init__(self, protocol, loop, timer=None,
|
||||||
|
size_t max_line_size=8190, size_t max_headers=32768,
|
||||||
|
size_t max_field_size=8190, payload_exception=None,
|
||||||
|
bint response_with_body=True, bint read_until_eof=False,
|
||||||
|
bint auto_decompress=True):
|
||||||
|
self._init(cparser.HTTP_RESPONSE, protocol, loop, timer,
|
||||||
|
max_line_size, max_headers, max_field_size,
|
||||||
|
payload_exception, response_with_body, auto_decompress)
|
||||||
|
|
||||||
|
cdef object _on_status_complete(self):
|
||||||
|
if self._buf:
|
||||||
|
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
||||||
|
PyByteArray_Resize(self._buf, 0)
|
||||||
|
else:
|
||||||
|
self._reason = self._reason or ''
|
||||||
|
|
||||||
|
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
|
||||||
|
pyparser._started = True
|
||||||
|
pyparser._headers = CIMultiDict()
|
||||||
|
pyparser._raw_headers = []
|
||||||
|
PyByteArray_Resize(pyparser._buf, 0)
|
||||||
|
pyparser._path = None
|
||||||
|
pyparser._reason = None
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_url(cparser.http_parser* parser,
|
||||||
|
const char *at, size_t length) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
try:
|
||||||
|
if length > pyparser._max_line_size:
|
||||||
|
raise LineTooLong(
|
||||||
|
'Status line is too long', pyparser._max_line_size, length)
|
||||||
|
extend(pyparser._buf, at, length)
|
||||||
|
except BaseException as ex:
|
||||||
|
pyparser._last_error = ex
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_status(cparser.http_parser* parser,
|
||||||
|
const char *at, size_t length) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
cdef str reason
|
||||||
|
try:
|
||||||
|
if length > pyparser._max_line_size:
|
||||||
|
raise LineTooLong(
|
||||||
|
'Status line is too long', pyparser._max_line_size, length)
|
||||||
|
extend(pyparser._buf, at, length)
|
||||||
|
except BaseException as ex:
|
||||||
|
pyparser._last_error = ex
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_header_field(cparser.http_parser* parser,
|
||||||
|
const char *at, size_t length) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
cdef Py_ssize_t size
|
||||||
|
try:
|
||||||
|
pyparser._on_status_complete()
|
||||||
|
size = len(pyparser._raw_name) + length
|
||||||
|
if size > pyparser._max_field_size:
|
||||||
|
raise LineTooLong(
|
||||||
|
'Header name is too long', pyparser._max_field_size, size)
|
||||||
|
pyparser._on_header_field(at, length)
|
||||||
|
except BaseException as ex:
|
||||||
|
pyparser._last_error = ex
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_header_value(cparser.http_parser* parser,
|
||||||
|
const char *at, size_t length) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
cdef Py_ssize_t size
|
||||||
|
try:
|
||||||
|
size = len(pyparser._raw_value) + length
|
||||||
|
if size > pyparser._max_field_size:
|
||||||
|
raise LineTooLong(
|
||||||
|
'Header value is too long', pyparser._max_field_size, size)
|
||||||
|
pyparser._on_header_value(at, length)
|
||||||
|
except BaseException as ex:
|
||||||
|
pyparser._last_error = ex
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
try:
|
||||||
|
pyparser._on_status_complete()
|
||||||
|
pyparser._on_headers_complete()
|
||||||
|
except BaseException as exc:
|
||||||
|
pyparser._last_error = exc
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
|
||||||
|
return 2
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_body(cparser.http_parser* parser,
|
||||||
|
const char *at, size_t length) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
cdef bytes body = at[:length]
|
||||||
|
try:
|
||||||
|
pyparser._payload.feed_data(body, length)
|
||||||
|
except BaseException as exc:
|
||||||
|
if pyparser._payload_exception is not None:
|
||||||
|
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
|
||||||
|
else:
|
||||||
|
pyparser._payload.set_exception(exc)
|
||||||
|
pyparser._payload_error = 1
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
try:
|
||||||
|
pyparser._started = False
|
||||||
|
pyparser._on_message_complete()
|
||||||
|
except BaseException as exc:
|
||||||
|
pyparser._last_error = exc
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
try:
|
||||||
|
pyparser._on_chunk_header()
|
||||||
|
except BaseException as exc:
|
||||||
|
pyparser._last_error = exc
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
|
||||||
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
try:
|
||||||
|
pyparser._on_chunk_complete()
|
||||||
|
except BaseException as exc:
|
||||||
|
pyparser._last_error = exc
|
||||||
|
return -1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
cdef parser_error_from_errno(cparser.http_errno errno):
|
||||||
|
cdef bytes desc = cparser.http_errno_description(errno)
|
||||||
|
|
||||||
|
if errno in (cparser.HPE_CB_message_begin,
|
||||||
|
cparser.HPE_CB_url,
|
||||||
|
cparser.HPE_CB_header_field,
|
||||||
|
cparser.HPE_CB_header_value,
|
||||||
|
cparser.HPE_CB_headers_complete,
|
||||||
|
cparser.HPE_CB_body,
|
||||||
|
cparser.HPE_CB_message_complete,
|
||||||
|
cparser.HPE_CB_status,
|
||||||
|
cparser.HPE_CB_chunk_header,
|
||||||
|
cparser.HPE_CB_chunk_complete):
|
||||||
|
cls = BadHttpMessage
|
||||||
|
|
||||||
|
elif errno == cparser.HPE_INVALID_STATUS:
|
||||||
|
cls = BadStatusLine
|
||||||
|
|
||||||
|
elif errno == cparser.HPE_INVALID_METHOD:
|
||||||
|
cls = BadStatusLine
|
||||||
|
|
||||||
|
elif errno == cparser.HPE_INVALID_URL:
|
||||||
|
cls = InvalidURLError
|
||||||
|
|
||||||
|
else:
|
||||||
|
cls = BadHttpMessage
|
||||||
|
|
||||||
|
return cls(desc.decode('latin-1'))
|
||||||
|
|
||||||
|
|
||||||
|
def parse_url(url):
|
||||||
|
cdef:
|
||||||
|
Py_buffer py_buf
|
||||||
|
char* buf_data
|
||||||
|
|
||||||
|
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
|
||||||
|
try:
|
||||||
|
buf_data = <char*>py_buf.buf
|
||||||
|
return _parse_url(buf_data, py_buf.len)
|
||||||
|
finally:
|
||||||
|
PyBuffer_Release(&py_buf)
|
||||||
|
|
||||||
|
|
||||||
|
cdef _parse_url(char* buf_data, size_t length):
|
||||||
|
cdef:
|
||||||
|
cparser.http_parser_url* parsed
|
||||||
|
int res
|
||||||
|
str schema = None
|
||||||
|
str host = None
|
||||||
|
object port = None
|
||||||
|
str path = None
|
||||||
|
str query = None
|
||||||
|
str fragment = None
|
||||||
|
str user = None
|
||||||
|
str password = None
|
||||||
|
str userinfo = None
|
||||||
|
object result = None
|
||||||
|
int off
|
||||||
|
int ln
|
||||||
|
|
||||||
|
parsed = <cparser.http_parser_url*> \
|
||||||
|
PyMem_Malloc(sizeof(cparser.http_parser_url))
|
||||||
|
if parsed is NULL:
|
||||||
|
raise MemoryError()
|
||||||
|
cparser.http_parser_url_init(parsed)
|
||||||
|
try:
|
||||||
|
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
|
||||||
|
|
||||||
|
if res == 0:
|
||||||
|
if parsed.field_set & (1 << cparser.UF_SCHEMA):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
|
||||||
|
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
else:
|
||||||
|
schema = ''
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_HOST):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_HOST].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_HOST].len
|
||||||
|
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
else:
|
||||||
|
host = ''
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_PORT):
|
||||||
|
port = parsed.port
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_PATH):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_PATH].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_PATH].len
|
||||||
|
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
else:
|
||||||
|
path = ''
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_QUERY):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_QUERY].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_QUERY].len
|
||||||
|
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
else:
|
||||||
|
query = ''
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
|
||||||
|
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
else:
|
||||||
|
fragment = ''
|
||||||
|
|
||||||
|
if parsed.field_set & (1 << cparser.UF_USERINFO):
|
||||||
|
off = parsed.field_data[<int>cparser.UF_USERINFO].off
|
||||||
|
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
|
||||||
|
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||||
|
|
||||||
|
user, sep, password = userinfo.partition(':')
|
||||||
|
|
||||||
|
return URL_build(scheme=schema,
|
||||||
|
user=user, password=password, host=host, port=port,
|
||||||
|
path=path, query=query, fragment=fragment)
|
||||||
|
else:
|
||||||
|
raise InvalidURLError("invalid url {!r}".format(buf_data))
|
||||||
|
finally:
|
||||||
|
PyMem_Free(parsed)
|
@ -0,0 +1,152 @@
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free

from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.object cimport PyObject_Str

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

cdef str to_str(object s):
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)


def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
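_serialize_headers() above writes the status line, then one "Name: value" pair per header, and terminates the block with an empty line, all CRLF-separated. A minimal pure-Python equivalent, shown only to make the wire format visible (it skips the C buffer management and the surrogate handling of _write_utf8):

# Illustrative only: the byte layout produced by _serialize_headers().
def serialize_headers_py(status_line: str, headers: dict) -> bytes:
    lines = [status_line]
    lines.extend('{}: {}'.format(k, v) for k, v in headers.items())
    return ('\r\n'.join(lines) + '\r\n\r\n').encode('utf-8')

print(serialize_headers_py('GET / HTTP/1.1',
                           {'Host': 'example.com', 'Connection': 'keep-alive'}))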
@ -0,0 +1,54 @@
from cpython cimport PyBytes_AsString

# from cpython cimport PyByteArray_AsString  # cython still does not export that
cdef extern from "Python.h":
    char* PyByteArray_AsString(bytearray ba) except NULL

from libc.stdint cimport uint32_t, uint64_t, uintmax_t


def _websocket_mask_cython(object mask, object data):
    """Note: this function mutates its `data` argument."""
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # is it needed in Python? malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
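The 8-byte and 4-byte loops above are only an optimization; the underlying operation is XOR-ing each payload byte with the repeating 4-byte mask (WebSocket client masking, RFC 6455 section 5.3). A pure-Python sketch of that operation, for reference only:

# Illustrative only: the byte-level equivalent of _websocket_mask_cython().
def websocket_mask_py(mask: bytes, data: bytearray) -> None:
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]   # mutates data in place, like the Cython version

buf = bytearray(b'hello')
websocket_mask_py(b'\x01\x02\x03\x04', buf)
websocket_mask_py(b'\x01\x02\x03\x04', buf)   # masking twice restores the input
assert buf == b'hello'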
@ -0,0 +1,208 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel  # noqa
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from multidict import CIMultiDict  # noqa
from yarl import URL

from .helpers import get_running_loop
from .typedefs import LooseCookies

if TYPE_CHECKING:  # pragma: no cover
    from .web_request import BaseRequest, Request
    from .web_response import StreamResponse
    from .web_app import Application
    from .web_exceptions import HTTPException
else:
    BaseRequest = Request = Application = StreamResponse = None
    HTTPException = None


class AbstractRouter(ABC):

    def __init__(self) -> None:
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> 'AbstractMatchInfo':
        """Return MATCH_INFO for given request"""


class AbstractMatchInfo(ABC):

    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """


class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str,
                      port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""


if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *,
                 loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self) -> None:
        """Clear all cookies."""

    @abstractmethod
    def update_cookies(self,
                       cookies: LooseCookies,
                       response_url: URL=URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
        """Return the jar's cookies filtered by their attributes."""


class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0
    output_size = 0
    length = 0  # type: Optional[int]

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes=b'') -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str='deflate') -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(self, status_line: str,
                            headers: 'CIMultiDict[str]') -> None:
        """Write HTTP headers"""


class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self,
            request: BaseRequest,
            response: StreamResponse,
            time: float) -> None:
        """Emit log to logger."""
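AbstractAccessLogger above is the hook aiohttp calls once per handled request. A minimal sketch of a concrete subclass, assuming the public import path aiohttp.abc; the log format and fields chosen here are illustrative, not aiohttp defaults:

# A minimal AbstractAccessLogger subclass (illustrative).
from aiohttp.abc import AbstractAccessLogger


class SimpleAccessLogger(AbstractAccessLogger):
    def log(self, request, response, time):
        # self.logger and self.log_format are set by the base __init__ above
        self.logger.info('%s %s -> %s in %.3fs',
                         request.method, request.path,
                         response.status, time)

Such a class would typically be passed to the server via the access_log_class argument of web.run_app().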
@ -0,0 +1,81 @@
import asyncio
from typing import Optional, cast

from .tcp_helpers import tcp_nodelay


class BaseProtocol(asyncio.Protocol):
    __slots__ = ('_loop', '_paused', '_drain_waiter',
                 '_connection_lost', '_reading_paused', 'transport')

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop  # type: asyncio.AbstractEventLoop
        self._paused = False
        self._drain_waiter = None  # type: Optional[asyncio.Future[None]]
        self._connection_lost = False
        self._reading_paused = False

        self.transport = None  # type: Optional[asyncio.Transport]

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._connection_lost = True
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        if self._connection_lost:
            raise ConnectionResetError('Connection lost')
        if not self._paused:
            return
        waiter = self._drain_waiter
        assert waiter is None or waiter.cancelled()
        waiter = self._loop.create_future()
        self._drain_waiter = waiter
        await waiter
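_drain_helper() above is the backpressure point: while the transport has signalled pause_writing(), the writing coroutine parks on the drain waiter until resume_writing() (or connection_lost()) releases it. A hedged sketch of how a writer built on this class would use it; the function name send_all is made up for illustration:

# Illustrative only: applying backpressure with BaseProtocol.
import asyncio


async def send_all(proto: 'BaseProtocol', chunks) -> None:
    for chunk in chunks:
        assert proto.transport is not None  # connection_made() must have run
        proto.transport.write(chunk)
        await proto._drain_helper()         # no-op unless pause_writing() fired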
@ -0,0 +1,292 @@
|
|||||||
|
"""HTTP related errors."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import warnings
|
||||||
|
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
||||||
|
|
||||||
|
from .typedefs import _CIMultiDict
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ssl
|
||||||
|
SSLContext = ssl.SSLContext
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
ssl = SSLContext = None # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
|
||||||
|
Fingerprint)
|
||||||
|
else:
|
||||||
|
RequestInfo = ClientResponse = ConnectionKey = None
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'ClientError',
|
||||||
|
|
||||||
|
'ClientConnectionError',
|
||||||
|
'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
|
||||||
|
|
||||||
|
'ClientSSLError',
|
||||||
|
'ClientConnectorSSLError', 'ClientConnectorCertificateError',
|
||||||
|
|
||||||
|
'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
|
||||||
|
'ServerFingerprintMismatch',
|
||||||
|
|
||||||
|
'ClientResponseError', 'ClientHttpProxyError',
|
||||||
|
'WSServerHandshakeError', 'ContentTypeError',
|
||||||
|
|
||||||
|
'ClientPayloadError', 'InvalidURL')
|
||||||
|
|
||||||
|
|
||||||
|
class ClientError(Exception):
|
||||||
|
"""Base class for client connection errors."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientResponseError(ClientError):
|
||||||
|
"""Connection error during reading response.
|
||||||
|
|
||||||
|
request_info: instance of RequestInfo
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, request_info: RequestInfo,
|
||||||
|
history: Tuple[ClientResponse, ...], *,
|
||||||
|
code: Optional[int]=None,
|
||||||
|
status: Optional[int]=None,
|
||||||
|
message: str='',
|
||||||
|
headers: Optional[_CIMultiDict]=None) -> None:
|
||||||
|
self.request_info = request_info
|
||||||
|
if code is not None:
|
||||||
|
if status is not None:
|
||||||
|
raise ValueError(
|
||||||
|
"Both code and status arguments are provided; "
|
||||||
|
"code is deprecated, use status instead")
|
||||||
|
warnings.warn("code argument is deprecated, use status instead",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2)
|
||||||
|
if status is not None:
|
||||||
|
self.status = status
|
||||||
|
elif code is not None:
|
||||||
|
self.status = code
|
||||||
|
else:
|
||||||
|
self.status = 0
|
||||||
|
self.message = message
|
||||||
|
self.headers = headers
|
||||||
|
self.history = history
|
||||||
|
self.args = (request_info, history)
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return ("%s, message=%r, url=%r" %
|
||||||
|
(self.status, self.message, self.request_info.real_url))
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
args = "%r, %r" % (self.request_info, self.history)
|
||||||
|
if self.status != 0:
|
||||||
|
args += ", status=%r" % (self.status,)
|
||||||
|
if self.message != '':
|
||||||
|
args += ", message=%r" % (self.message,)
|
||||||
|
if self.headers is not None:
|
||||||
|
args += ", headers=%r" % (self.headers,)
|
||||||
|
return "%s(%s)" % (type(self).__name__, args)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def code(self) -> int:
|
||||||
|
warnings.warn("code property is deprecated, use status instead",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2)
|
||||||
|
return self.status
|
||||||
|
|
||||||
|
@code.setter
|
||||||
|
def code(self, value: int) -> None:
|
||||||
|
warnings.warn("code property is deprecated, use status instead",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2)
|
||||||
|
self.status = value
|
||||||
|
|
||||||
|
|
||||||
|
class ContentTypeError(ClientResponseError):
|
||||||
|
"""ContentType found is not valid."""
|
||||||
|
|
||||||
|
|
||||||
|
class WSServerHandshakeError(ClientResponseError):
|
||||||
|
"""websocket server handshake error."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientHttpProxyError(ClientResponseError):
|
||||||
|
"""HTTP proxy error.
|
||||||
|
|
||||||
|
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||||
|
proxy responds with status other than ``200 OK``
|
||||||
|
on ``CONNECT`` request.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class TooManyRedirects(ClientResponseError):
|
||||||
|
"""Client was redirected too many times."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConnectionError(ClientError):
|
||||||
|
"""Base class for client socket errors."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientOSError(ClientConnectionError, OSError):
|
||||||
|
"""OSError error."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConnectorError(ClientOSError):
|
||||||
|
"""Client connector error.
|
||||||
|
|
||||||
|
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||||
|
connection to proxy can not be established.
|
||||||
|
"""
|
||||||
|
def __init__(self, connection_key: ConnectionKey,
|
||||||
|
os_error: OSError) -> None:
|
||||||
|
self._conn_key = connection_key
|
||||||
|
self._os_error = os_error
|
||||||
|
super().__init__(os_error.errno, os_error.strerror)
|
||||||
|
self.args = (connection_key, os_error)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def os_error(self) -> OSError:
|
||||||
|
return self._os_error
|
||||||
|
|
||||||
|
@property
|
||||||
|
def host(self) -> str:
|
||||||
|
return self._conn_key.host
|
||||||
|
|
||||||
|
@property
|
||||||
|
def port(self) -> Optional[int]:
|
||||||
|
return self._conn_key.port
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
|
||||||
|
return self._conn_key.ssl
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
|
||||||
|
.format(self, self.ssl if self.ssl is not None else 'default',
|
||||||
|
self.strerror))
|
||||||
|
|
||||||
|
# OSError.__reduce__ does too much black magic
|
||||||
|
__reduce__ = BaseException.__reduce__
|
||||||
|
|
||||||
|
|
||||||
|
class ClientProxyConnectionError(ClientConnectorError):
|
||||||
|
"""Proxy connection error.
|
||||||
|
|
||||||
|
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||||
|
connection to proxy can not be established.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ServerConnectionError(ClientConnectionError):
|
||||||
|
"""Server connection errors."""
|
||||||
|
|
||||||
|
|
||||||
|
class ServerDisconnectedError(ServerConnectionError):
|
||||||
|
"""Server disconnected."""
|
||||||
|
|
||||||
|
def __init__(self, message: Optional[str]=None) -> None:
|
||||||
|
self.message = message
|
||||||
|
if message is None:
|
||||||
|
self.args = ()
|
||||||
|
else:
|
||||||
|
self.args = (message,)
|
||||||
|
|
||||||
|
|
||||||
|
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
|
||||||
|
"""Server timeout error."""
|
||||||
|
|
||||||
|
|
||||||
|
class ServerFingerprintMismatch(ServerConnectionError):
|
||||||
|
"""SSL certificate does not match expected fingerprint."""
|
||||||
|
|
||||||
|
def __init__(self, expected: bytes, got: bytes,
|
||||||
|
host: str, port: int) -> None:
|
||||||
|
self.expected = expected
|
||||||
|
self.got = got
|
||||||
|
self.host = host
|
||||||
|
self.port = port
|
||||||
|
self.args = (expected, got, host, port)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
|
||||||
|
self.__class__.__name__, self.expected, self.got,
|
||||||
|
self.host, self.port)
|
||||||
|
|
||||||
|
|
||||||
|
class ClientPayloadError(ClientError):
|
||||||
|
"""Response payload error."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidURL(ClientError, ValueError):
|
||||||
|
"""Invalid URL.
|
||||||
|
|
||||||
|
URL used for fetching is malformed, e.g. it doesn't contain a host
|
||||||
|
part."""
|
||||||
|
|
||||||
|
# Derive from ValueError for backward compatibility
|
||||||
|
|
||||||
|
def __init__(self, url: Any) -> None:
|
||||||
|
# The type of url is not yarl.URL because the exception can be raised
|
||||||
|
# on URL(url) call
|
||||||
|
super().__init__(url)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def url(self) -> Any:
|
||||||
|
return self.args[0]
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return '<{} {}>'.format(self.__class__.__name__, self.url)
|
||||||
|
|
||||||
|
|
||||||
|
class ClientSSLError(ClientConnectorError):
|
||||||
|
"""Base error for ssl.*Errors."""
|
||||||
|
|
||||||
|
|
||||||
|
if ssl is not None:
|
||||||
|
cert_errors = (ssl.CertificateError,)
|
||||||
|
cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
|
||||||
|
|
||||||
|
ssl_errors = (ssl.SSLError,)
|
||||||
|
ssl_error_bases = (ClientSSLError, ssl.SSLError)
|
||||||
|
else: # pragma: no cover
|
||||||
|
cert_errors = tuple()
|
||||||
|
cert_errors_bases = (ClientSSLError, ValueError,)
|
||||||
|
|
||||||
|
ssl_errors = tuple()
|
||||||
|
ssl_error_bases = (ClientSSLError,)
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
|
||||||
|
"""Response ssl error."""
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
|
||||||
|
"""Response certificate error."""
|
||||||
|
|
||||||
|
def __init__(self, connection_key:
|
||||||
|
ConnectionKey, certificate_error: Exception) -> None:
|
||||||
|
self._conn_key = connection_key
|
||||||
|
self._certificate_error = certificate_error
|
||||||
|
self.args = (connection_key, certificate_error)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def certificate_error(self) -> Exception:
|
||||||
|
return self._certificate_error
|
||||||
|
|
||||||
|
@property
|
||||||
|
def host(self) -> str:
|
||||||
|
return self._conn_key.host
|
||||||
|
|
||||||
|
@property
|
||||||
|
def port(self) -> Optional[int]:
|
||||||
|
return self._conn_key.port
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ssl(self) -> bool:
|
||||||
|
return self._conn_key.is_ssl
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
|
||||||
|
'[{0.certificate_error.__class__.__name__}: '
|
||||||
|
'{0.certificate_error.args}]'.format(self))
|
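The classes above form the exception hierarchy that aiohttp client code catches. A hedged usage sketch against the public aiohttp API (the URL is a placeholder): ClientResponseError carries .status and .message, ClientConnectorError carries .host, .port and .os_error, exactly as defined above.

# Illustrative only: catching the client exception hierarchy.
import asyncio
import aiohttp


async def fetch(url: str) -> str:
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, raise_for_status=True) as resp:
                return await resp.text()
    except aiohttp.ClientResponseError as exc:      # non-2xx response
        print('HTTP error', exc.status, exc.message)
    except aiohttp.ClientConnectorError as exc:     # DNS / TCP / TLS failure
        print('cannot connect to', exc.host, exc.port, exc.os_error)
    return ''


asyncio.run(fetch('https://example.com/'))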
@ -0,0 +1,239 @@
|
|||||||
|
import asyncio
|
||||||
|
from contextlib import suppress
|
||||||
|
from typing import Any, Optional, Tuple
|
||||||
|
|
||||||
|
from .base_protocol import BaseProtocol
|
||||||
|
from .client_exceptions import (
|
||||||
|
ClientOSError,
|
||||||
|
ClientPayloadError,
|
||||||
|
ServerDisconnectedError,
|
||||||
|
ServerTimeoutError,
|
||||||
|
)
|
||||||
|
from .helpers import BaseTimerContext
|
||||||
|
from .http import HttpResponseParser, RawResponseMessage
|
||||||
|
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseHandler(BaseProtocol,
|
||||||
|
DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
||||||
|
"""Helper class to adapt between Protocol and StreamReader."""
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
loop: asyncio.AbstractEventLoop) -> None:
|
||||||
|
BaseProtocol.__init__(self, loop=loop)
|
||||||
|
DataQueue.__init__(self, loop)
|
||||||
|
|
||||||
|
self._should_close = False
|
||||||
|
|
||||||
|
self._payload = None
|
||||||
|
self._skip_payload = False
|
||||||
|
self._payload_parser = None
|
||||||
|
|
||||||
|
self._timer = None
|
||||||
|
|
||||||
|
self._tail = b''
|
||||||
|
self._upgraded = False
|
||||||
|
self._parser = None # type: Optional[HttpResponseParser]
|
||||||
|
|
||||||
|
self._read_timeout = None # type: Optional[float]
|
||||||
|
self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def upgraded(self) -> bool:
|
||||||
|
return self._upgraded
|
||||||
|
|
||||||
|
@property
|
||||||
|
def should_close(self) -> bool:
|
||||||
|
if (self._payload is not None and
|
||||||
|
not self._payload.is_eof() or self._upgraded):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return (self._should_close or self._upgraded or
|
||||||
|
self.exception() is not None or
|
||||||
|
self._payload_parser is not None or
|
||||||
|
len(self) > 0 or bool(self._tail))
|
||||||
|
|
||||||
|
def force_close(self) -> None:
|
||||||
|
self._should_close = True
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
transport = self.transport
|
||||||
|
if transport is not None:
|
||||||
|
transport.close()
|
||||||
|
self.transport = None
|
||||||
|
self._payload = None
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
def is_connected(self) -> bool:
|
||||||
|
return self.transport is not None
|
||||||
|
|
||||||
|
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
if self._payload_parser is not None:
|
||||||
|
with suppress(Exception):
|
||||||
|
self._payload_parser.feed_eof()
|
||||||
|
|
||||||
|
uncompleted = None
|
||||||
|
if self._parser is not None:
|
||||||
|
try:
|
||||||
|
uncompleted = self._parser.feed_eof()
|
||||||
|
except Exception:
|
||||||
|
if self._payload is not None:
|
||||||
|
self._payload.set_exception(
|
||||||
|
ClientPayloadError(
|
||||||
|
'Response payload is not completed'))
|
||||||
|
|
||||||
|
if not self.is_eof():
|
||||||
|
if isinstance(exc, OSError):
|
||||||
|
exc = ClientOSError(*exc.args)
|
||||||
|
if exc is None:
|
||||||
|
exc = ServerDisconnectedError(uncompleted)
|
||||||
|
# assigns self._should_close to True as side effect,
|
||||||
|
# we do it anyway below
|
||||||
|
self.set_exception(exc)
|
||||||
|
|
||||||
|
self._should_close = True
|
||||||
|
self._parser = None
|
||||||
|
self._payload = None
|
||||||
|
self._payload_parser = None
|
||||||
|
self._reading_paused = False
|
||||||
|
|
||||||
|
super().connection_lost(exc)
|
||||||
|
|
||||||
|
def eof_received(self) -> None:
|
||||||
|
# should call parser.feed_eof() most likely
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
def pause_reading(self) -> None:
|
||||||
|
super().pause_reading()
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
def resume_reading(self) -> None:
|
||||||
|
super().resume_reading()
|
||||||
|
self._reschedule_timeout()
|
||||||
|
|
||||||
|
def set_exception(self, exc: BaseException) -> None:
|
||||||
|
self._should_close = True
|
||||||
|
self._drop_timeout()
|
||||||
|
super().set_exception(exc)
|
||||||
|
|
||||||
|
def set_parser(self, parser: Any, payload: Any) -> None:
|
||||||
|
# TODO: actual types are:
|
||||||
|
# parser: WebSocketReader
|
||||||
|
# payload: FlowControlDataQueue
|
||||||
|
# but they are not generic enough
|
||||||
|
# Need an ABC for both types
|
||||||
|
self._payload = payload
|
||||||
|
self._payload_parser = parser
|
||||||
|
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
if self._tail:
|
||||||
|
data, self._tail = self._tail, b''
|
||||||
|
self.data_received(data)
|
||||||
|
|
||||||
|
def set_response_params(self, *, timer: BaseTimerContext=None,
|
||||||
|
skip_payload: bool=False,
|
||||||
|
read_until_eof: bool=False,
|
||||||
|
auto_decompress: bool=True,
|
||||||
|
read_timeout: Optional[float]=None) -> None:
|
||||||
|
self._skip_payload = skip_payload
|
||||||
|
|
||||||
|
self._read_timeout = read_timeout
|
||||||
|
self._reschedule_timeout()
|
||||||
|
|
||||||
|
self._parser = HttpResponseParser(
|
||||||
|
self, self._loop, timer=timer,
|
||||||
|
payload_exception=ClientPayloadError,
|
||||||
|
read_until_eof=read_until_eof,
|
||||||
|
auto_decompress=auto_decompress)
|
||||||
|
|
||||||
|
if self._tail:
|
||||||
|
data, self._tail = self._tail, b''
|
||||||
|
self.data_received(data)
|
||||||
|
|
||||||
|
def _drop_timeout(self) -> None:
|
||||||
|
if self._read_timeout_handle is not None:
|
||||||
|
self._read_timeout_handle.cancel()
|
||||||
|
self._read_timeout_handle = None
|
||||||
|
|
||||||
|
def _reschedule_timeout(self) -> None:
|
||||||
|
timeout = self._read_timeout
|
||||||
|
if self._read_timeout_handle is not None:
|
||||||
|
self._read_timeout_handle.cancel()
|
||||||
|
|
||||||
|
if timeout:
|
||||||
|
self._read_timeout_handle = self._loop.call_later(
|
||||||
|
timeout, self._on_read_timeout)
|
||||||
|
else:
|
||||||
|
self._read_timeout_handle = None
|
||||||
|
|
||||||
|
def _on_read_timeout(self) -> None:
|
||||||
|
exc = ServerTimeoutError("Timeout on reading data from socket")
|
||||||
|
self.set_exception(exc)
|
||||||
|
if self._payload is not None:
|
||||||
|
self._payload.set_exception(exc)
|
||||||
|
|
||||||
|
def data_received(self, data: bytes) -> None:
|
||||||
|
self._reschedule_timeout()
|
||||||
|
|
||||||
|
if not data:
|
||||||
|
return
|
||||||
|
|
||||||
|
# custom payload parser
|
||||||
|
if self._payload_parser is not None:
|
||||||
|
eof, tail = self._payload_parser.feed_data(data)
|
||||||
|
if eof:
|
||||||
|
self._payload = None
|
||||||
|
self._payload_parser = None
|
||||||
|
|
||||||
|
if tail:
|
||||||
|
self.data_received(tail)
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
if self._upgraded or self._parser is None:
|
||||||
|
# i.e. websocket connection, websocket parser is not set yet
|
||||||
|
self._tail += data
|
||||||
|
else:
|
||||||
|
# parse http messages
|
||||||
|
try:
|
||||||
|
messages, upgraded, tail = self._parser.feed_data(data)
|
||||||
|
except BaseException as exc:
|
||||||
|
if self.transport is not None:
|
||||||
|
# connection.release() could be called BEFORE
|
||||||
|
# data_received(), the transport is already
|
||||||
|
# closed in this case
|
||||||
|
self.transport.close()
|
||||||
|
# should_close is True after the call
|
||||||
|
self.set_exception(exc)
|
||||||
|
return
|
||||||
|
|
||||||
|
self._upgraded = upgraded
|
||||||
|
|
||||||
|
payload = None
|
||||||
|
for message, payload in messages:
|
||||||
|
if message.should_close:
|
||||||
|
self._should_close = True
|
||||||
|
|
||||||
|
self._payload = payload
|
||||||
|
|
||||||
|
if self._skip_payload or message.code in (204, 304):
|
||||||
|
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore # noqa
|
||||||
|
else:
|
||||||
|
self.feed_data((message, payload), 0)
|
||||||
|
if payload is not None:
|
||||||
|
# new message(s) was processed
|
||||||
|
# register timeout handler unsubscribing
|
||||||
|
# either on end-of-stream or immediately for
|
||||||
|
# EMPTY_PAYLOAD
|
||||||
|
if payload is not EMPTY_PAYLOAD:
|
||||||
|
payload.on_eof(self._drop_timeout)
|
||||||
|
else:
|
||||||
|
self._drop_timeout()
|
||||||
|
|
||||||
|
if tail:
|
||||||
|
if upgraded:
|
||||||
|
self.data_received(tail)
|
||||||
|
else:
|
||||||
|
self._tail = tail
|