diff --git a/.gitignore b/.gitignore index 8eef5b1..28a7297 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ __pycache__/ *.pyc .env -config.py +configs/* .DS_Store private.py diff --git a/blacklist.json b/blacklist.json deleted file mode 100644 index 918516c..0000000 --- a/blacklist.json +++ /dev/null @@ -1,3 +0,0 @@ -[ - 589949234308972556 -] \ No newline at end of file diff --git a/bot.py b/bot.py index 74992cb..0c6d01c 100755 --- a/bot.py +++ b/bot.py @@ -2,13 +2,13 @@ import datetime import logging import sys from collections import deque, Counter +from typing import List import aiohttp import discord import git from discord.ext import commands -import config from cogs.utils.config import Config from cogs.utils.lang import Texts from cogs.utils.version import Version @@ -20,27 +20,32 @@ build = git.Repo(search_parent_directories=True).head.object.hexsha log = logging.getLogger(__name__) -l_extensions = ( +l_extensions: List[str] = [ 'cogs.admin', 'cogs.basics', 'cogs.utility', 'cogs.logs', - 'cogs.poll', + # 'cogs.poll', 'jishaku', -) +] async def _prefix_callable(bot, message: discord.message) -> list: extras = ['.'] if message.guild is not None: - extras = bot.prefixes.get(str(message.guild.id), []) + extras = [] + if str(message.guild.id) in bot.prefixes: + extras.extend( + bot.prefixes.get(str(message.guild.id), "prefixes") + .split('-sep-') + ) return commands.when_mentioned_or(*extras)(bot, message) class TuxBot(commands.AutoShardedBot): - def __init__(self, unload: list, database): + def __init__(self): super().__init__(command_prefix=_prefix_callable, pm_help=None, help_command=None, description=description, help_attrs=dict(hidden=True), @@ -52,28 +57,30 @@ class TuxBot(commands.AutoShardedBot): self.command_stats = Counter() self.uptime: datetime = datetime.datetime.utcnow() - self.config = config - self.database = database self._prev_events = deque(maxlen=10) self.session = aiohttp.ClientSession(loop=self.loop) - self.prefixes = Config('prefixes.json') - self.blacklist = Config('blacklist.json') + self.config = Config('./configs/config.cfg') + self.prefixes = Config('./configs/prefixes.cfg') + self.blacklist = Config('./configs/blacklist.cfg') - self.version = Version(10, 0, 0, pre_release='a21', build=build) + self.version = Version(10, 1, 0, pre_release='a0', build=build) for extension in l_extensions: - if extension not in unload: - try: - self.load_extension(extension) - except Exception as e: - print(Texts().get("Failed to load extension : ") - + extension, file=sys.stderr) - log.error(Texts().get("Failed to load extension : ") - + extension, exc_info=e) + try: + print(Texts().get("Extension loaded successfully : ") + + extension) + log.info(Texts().get("Extension loaded successfully : ") + + extension) + self.load_extension(extension) + except Exception as e: + print(Texts().get("Failed to load extension : ") + + extension, file=sys.stderr) + log.error(Texts().get("Failed to load extension : ") + + extension, exc_info=e) async def is_owner(self, user: discord.User) -> bool: - return user.id in config.authorized_id + return str(user.id) in self.config.get("permissions", "owners").split(',') async def on_socket_response(self, msg): self._prev_events.append(msg) @@ -116,8 +123,12 @@ class TuxBot(commands.AutoShardedBot): print(self.version) presence: dict = dict(status=discord.Status.dnd) - if self.config.activity is not None: - presence.update(activity=discord.Game(name=self.config.activity)) + if self.config.get("bot", "Activity", fallback=None) is not None: + 
presence.update( + activity=discord.Game( + name=self.config.get("bot", "Activity") + ) + ) await self.change_presence(**presence) @@ -127,10 +138,10 @@ class TuxBot(commands.AutoShardedBot): @property def logs_webhook(self) -> discord.Webhook: - logs_webhook = self.config.logs_webhook + logs_webhook = self.config["webhook"] webhook = discord.Webhook.partial( - id=logs_webhook.get('id'), - token=logs_webhook.get('token'), + id=logs_webhook.get('ID'), + token=logs_webhook.get('Url'), adapter=discord.AsyncWebhookAdapter( self.session ) @@ -140,6 +151,19 @@ class TuxBot(commands.AutoShardedBot): async def close(self): await super().close() + await self.session.close() def run(self): - super().run(config.token, reconnect=True) + super().run(self.config.get("bot", "Token"), reconnect=True) + + +if __name__ == "__main__": + log = logging.getLogger() + + print(Texts().get('Starting...')) + + bot = TuxBot() + try: + bot.run() + except KeyboardInterrupt: + bot.close() diff --git a/cogs/logs.py b/cogs/logs.py index 07b48dd..4706b00 100644 --- a/cogs/logs.py +++ b/cogs/logs.py @@ -153,15 +153,29 @@ class Logs(commands.Cog): await self.webhook.send(embed=e) @commands.Cog.listener() - async def on_guild_join(self, guild): + async def on_guild_join(self, guild: discord.guild): e = discord.Embed(colour=0x53dda4, title='New Guild') # green colour await self.send_guild_stats(e, guild) @commands.Cog.listener() - async def on_guild_remove(self, guild): + async def on_guild_remove(self, guild: discord.guild): e = discord.Embed(colour=0xdd5f53, title='Left Guild') # red colour await self.send_guild_stats(e, guild) + @commands.Cog.listener() + async def on_message(self, message: discord.message): + if message.guild is None: + e = discord.Embed(colour=0x0a97f5, title='New DM') # blue colour + e.set_author( + name=message.author, + icon_url=message.author.avatar_url_as(format='png') + ) + e.description = message.content + if len(message.attachments) > 0: + e.set_image(url=message.attachments[0].url) + e.set_footer(text=f"User ID: {message.author.id}") + await self.webhook.send(embed=e) + @commands.Cog.listener() async def on_command_error(self, ctx, error): await self.register_command(ctx) @@ -246,12 +260,14 @@ class Logs(commands.Cog): minutes = delta.total_seconds() / 60 total = sum(self.bot.socket_stats.values()) cpm = total / minutes - await ctx.send(f'{total} socket events observed ({cpm:.2f}/minute):\n{self.bot.socket_stats}') + await ctx.send( + f'{total} socket events observed ({cpm:.2f}/minute):\n{self.bot.socket_stats}') @commands.command(name='uptime') async def _uptime(self, ctx): """Tells you how long the bot has been up for.""" - uptime = humanize.naturaltime(datetime.datetime.utcnow() - self.bot.uptime) + uptime = humanize.naturaltime( + datetime.datetime.utcnow() - self.bot.uptime) await ctx.send(f'Uptime: **{uptime}**') diff --git a/cogs/poll.py b/cogs/poll.py index bb53330..0dbe43e 100644 --- a/cogs/poll.py +++ b/cogs/poll.py @@ -22,12 +22,9 @@ class Polls(commands.Cog): .query(Poll) \ .filter(Poll.message_id == pld.message_id) - print("-------------------------25---------------------------") - if poll.count() != 0: - print("-------------------------27---------------------------") + if poll.count() > 0: poll = poll.one() emotes = utils_emotes.get(poll.available_choices) - if pld.emoji.name in emotes: return poll @@ -52,6 +49,8 @@ class Polls(commands.Cog): pass choice = utils_emotes.get_index(pld.emoji.name) + print(choice) + response = self.bot.database.session.query(Poll) \ .filter( 
Responses.poll_id == poll.id, @@ -59,18 +58,20 @@ class Polls(commands.Cog): Responses.choice == choice ) - print(pld.user_id, poll.id, choice) - if response.count() != 0: + print("--pre delete--") response = response.one() self.bot.database.session.delete(response) + print("--post delete--") else: + print("--pre add--") response = Responses( user=pld.user_id, poll_id=poll.id, choice=choice ) self.bot.database.session.add(response) + print("--post add--") self.bot.database.session.commit() """---------------------------------------------------------------------""" diff --git a/cogs/utils/config.py b/cogs/utils/config.py index 91fc65e..832d24a 100644 --- a/cogs/utils/config.py +++ b/cogs/utils/config.py @@ -1,4 +1,4 @@ -import json +import configparser class Config: @@ -7,11 +7,8 @@ class Config: def __init__(self, name): self.name = name - try: - with open(self.name, 'r') as f: - self._db = json.load(f) - except FileNotFoundError: - self._db = {} + self._db: configparser.ConfigParser = configparser.ConfigParser() + self._db.read(self.name) def __contains__(self, item): return item in self._db @@ -19,9 +16,8 @@ class Config: def __getitem__(self, item): return self._db[str(item)] - def get(self, key, *args): - """Retrieves a config entry.""" - return self._db.get(str(key), *args) + def all(self) -> list: + return self._db.sections() - def all(self) -> dict: - return self._db + def get(self, *args, **kwargs) -> str: + return self._db.get(*args, **kwargs) diff --git a/cogs/utils/database.py b/cogs/utils/database.py index bb13ff4..9a7e7a3 100644 --- a/cogs/utils/database.py +++ b/cogs/utils/database.py @@ -1,10 +1,16 @@ +from .config import Config + from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, session class Database: - def __init__(self, config): - self.engine = create_engine(config.postgresql, echo=True) + def __init__(self, config: Config): + conf_postgresql = config["postgresql"] + postgresql = 'postgresql://{}:{}@{}/{}'.format( + conf_postgresql.get("Username"), conf_postgresql.get("Password"), + conf_postgresql.get("Host"), conf_postgresql.get("DBName")) + self.engine = create_engine(postgresql, echo=False) Session = sessionmaker() Session.configure(bind=self.engine) diff --git a/cogs/utils/lang.py b/cogs/utils/lang.py index a357e85..6b9993a 100644 --- a/cogs/utils/lang.py +++ b/cogs/utils/lang.py @@ -1,5 +1,5 @@ import gettext -import config +from .config import Config from cogs.utils.database import Database from .models.lang import Lang @@ -22,7 +22,7 @@ class Texts: @staticmethod def get_locale(ctx): - database = Database(config) + database = Database(Config("./configs/config.cfg")) if ctx is not None: current = database.session\ diff --git a/cogs/utils/models/__init__.py b/cogs/utils/models/__init__.py index 9138e7e..4b2dcdc 100644 --- a/cogs/utils/models/__init__.py +++ b/cogs/utils/models/__init__.py @@ -1,3 +1,6 @@ +from sqlalchemy.ext.declarative import declarative_base +Base = declarative_base() + from .lang import Lang from .warn import Warn -from .poll import Poll, Responses +# from .poll import Poll, Responses diff --git a/cogs/utils/models/lang.py b/cogs/utils/models/lang.py index 52aa037..2480160 100644 --- a/cogs/utils/models/lang.py +++ b/cogs/utils/models/lang.py @@ -1,6 +1,5 @@ -from sqlalchemy.ext.declarative import declarative_base +from . 
import Base from sqlalchemy import Column, String -Base = declarative_base() class Lang(Base): diff --git a/cogs/utils/models/warn.py b/cogs/utils/models/warn.py index 171a378..5891791 100644 --- a/cogs/utils/models/warn.py +++ b/cogs/utils/models/warn.py @@ -1,10 +1,8 @@ import datetime -from sqlalchemy.ext.declarative import declarative_base +from . import Base from sqlalchemy import Column, Integer, String, BIGINT, TIMESTAMP -Base = declarative_base() - class Warn(Base): __tablename__ = 'warns' diff --git a/configs/blacklist.cfg b/configs/blacklist.cfg new file mode 100644 index 0000000..e69de29 diff --git a/configs/prefixes.cfg b/configs/prefixes.cfg new file mode 100644 index 0000000..0d6fd82 --- /dev/null +++ b/configs/prefixes.cfg @@ -0,0 +1,2 @@ +[280805240977227776] +prefixes = b. diff --git a/extras/first_run/__init__.py b/extras/first_run/__init__.py deleted file mode 100644 index e90bbc3..0000000 --- a/extras/first_run/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .initializer import Config - -setup = Config() - -setup.install() - -setup.ask() -setup.save() - -setup.clean() \ No newline at end of file diff --git a/extras/first_run/initializer.py b/extras/first_run/initializer.py deleted file mode 100644 index 8b0fb8b..0000000 --- a/extras/first_run/initializer.py +++ /dev/null @@ -1,92 +0,0 @@ -from pip._internal import main as pip -import shutil - -from .langs import locales, texts - - -class Config: - def __init__(self): - self.config = { - 'log_channel_id': '', - 'main_server_id': '', - 'authorized_id': '[admin ids here (in int)]', - } - - with open('requirements.txt', 'r') as f: - self.packages = f.read().split('\n') - - def input(self, key, **kwargs): - locale = self.config.get('locale', 'multiple') - - print('\n\033[4m' + texts.get(locale).get(key) + '\033[0m') - response = input('> ') - - if kwargs.get('valid'): - while response not in kwargs.get('valid'): - print('\033[36m' + '/'.join(kwargs.get('valid')) + '\033[0m') - response = input('> ') - - if not kwargs.get('empty', True): - while len(response) == 0: - print('\033[41m' + texts.get(locale).get('not_empty') - + '\033[0m') - response = input('> ') - else: - response = kwargs.get('default', None) if len(response) == 0 \ - else response - - self.config[key] = response - - def install(self): - self.input('locale', valid=locales) - print('\n\n\033[4;36m' - + texts.get(self.config.get('locale')).get('install') - + '\033[0m\n') - - for package in self.packages: - pip(['install', package]) - - def ask(self): - print('\n\n\033[4;36m' + texts.get(self.config.get('locale')) - .get('conf') + '\033[0m\n') - - self.input('token', empty=False) - self.input('postgresql_username', empty=False) - self.input('postgresql_password', empty=False) - self.input('postgresql_dbname', empty=False) - - print('\n\n\033[4;36m' + texts.get(self.config.get('locale')) - .get('logs') + '\033[0m\n') - - self.input('wh_id', empty=True) - self.input('wh_token', empty=True) - - print('\n\n\033[4;36m' + texts.get(self.config.get('locale')) - .get('misc') + '\033[0m\n') - - self.input('activity', empty=True) - - def save(self): - with open('config.py', 'w') as file: - postgresql = f"postgresql://" \ - f"{self.config.get('postgresql_username')}:" \ - f"{self.config.get('postgresql_password')}" \ - f"@localhost/{self.config.get('postgresql_dbname')}" - file.write(f"postgresql = '{postgresql}'\n") - - logs_webhook = dict(id=int(self.config.get('wh_id')), - token=self.config.get('wh_token')) - file.write(f"logs_webhook = '{logs_webhook}'\n") - - for key, 
value in self.config.items(): - if not key.startswith('postgresql_') \ - and not key.startswith('wh_'): - value = f"'{value}'" if type(value) is str else value - file.write(f"{key} = {value}\n") - print('\n\n\033[4;36m' + texts.get(self.config.get('locale')) - .get('end') + '\033[0m\n') - - def clean(self): - print('\n\n\033[4;36m' - + texts.get(self.config.get('locale')).get('clean') - + '\033[0m\n') diff --git a/extras/first_run/langs.py b/extras/first_run/langs.py deleted file mode 100644 index 1792e71..0000000 --- a/extras/first_run/langs.py +++ /dev/null @@ -1,61 +0,0 @@ -locales = ['fr', 'en'] -texts = { - 'fr': { - 'install': "Installation des modules...", - - 'conf': "Configuration...", - - 'token': "Veuillez entrer le token", - 'not_empty': "Cette valeur ne doit pas être vide", - - 'postgresql_username': "Veuillez entrer le nom d'utilisateur de postgresql", - 'postgresql_password': "Veuillez entrer le mot de passe de postgresql", - 'postgresql_dbname': "Veuillez entrer le nom de la base de donnée", - - 'logs': "Channel de logs (non obligatoire)", - - 'wh_id': "L'id du webhook pour le channel de logs", - 'wh_token': "Le token du webhook pour le channel de logs", - - 'misc': 'Autre', - - 'activity': "Joue à ...", - - 'end': "Configuration terminée, vous pouvez à tout moment la rectifier en modifiant le fichier config.py", - - 'clean': "Nettoyage..." - }, - - 'en': { - 'install': "Installation of the modules....", - - 'conf': "Configuration...", - - 'token': "Please enter the token", - 'not_empty': "This value must not be empty", - - 'postgresql_username': "Please enter the postgresql username", - 'postgresql_password': "Please enter the postgresql password", - 'postgresql_dbname': "Please enter the database name", - - 'logs': "Log channel (not required)", - - 'wh_id': "Webhook id for log channel", - 'wh_token': "Webhook token for log channel", - - 'misc': 'Misc', - - 'activity': "Playing ...", - - 'end': "Configuration completed, you can fix it at any time by modifying the config.py file", - - 'clean': "Cleaning..." - }, - - 'multiple': { - 'locale': "Veuillez choisir une langue | Please choose a language " - "[fr/en]", - 'not_empty': "Cette valeur ne doit pas être vide |" - " This value must not be empty" - } -} diff --git a/extras/locales/en/LC_MESSAGES/base.po b/extras/locales/en/LC_MESSAGES/base.po index ff31cf1..3ecd6d4 100644 --- a/extras/locales/en/LC_MESSAGES/base.po +++ b/extras/locales/en/LC_MESSAGES/base.po @@ -15,59 +15,48 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: launcher.py:51 msgid "Starting..." msgstr "" -#: launcher.py:56 launcher.py:57 msgid "Could not set up PostgreSQL..." msgstr "" -#: launcher.py:68 msgid "Launch without loading the module" msgstr "" -#: launcher.py:73 msgid "Search for update" msgstr "" -#: launcher.py:73 msgid "Checking for update..." msgstr "" -#: launcher.py:89 msgid "A new version is available !" msgstr "" -#: launcher.py:93 msgid "Update ? [Y/n] " msgstr "" -#: launcher.py:97 msgid "Downloading..." msgstr "" -#: launcher.py:108 msgid "Tuxbot is up to date" msgstr "" -#: bot.py:52 msgid "Failed to load extension : " msgstr "" -#: bot.py:59 +msgid "Extension loaded successfully : " +msgstr "" + msgid "This command cannot be used in private messages." msgstr "" -#: bot.py:63 msgid "Sorry. This command is disabled and cannot be used." 
msgstr "" -#: bot.py:66 msgid "In " msgstr "" -#: bot.py:77 msgid "Ready:" msgstr "" diff --git a/extras/locales/fr/LC_MESSAGES/base.mo b/extras/locales/fr/LC_MESSAGES/base.mo index d01053d..b967b85 100644 Binary files a/extras/locales/fr/LC_MESSAGES/base.mo and b/extras/locales/fr/LC_MESSAGES/base.mo differ diff --git a/extras/locales/fr/LC_MESSAGES/base.po b/extras/locales/fr/LC_MESSAGES/base.po index 78e005b..61a4d25 100644 --- a/extras/locales/fr/LC_MESSAGES/base.po +++ b/extras/locales/fr/LC_MESSAGES/base.po @@ -15,59 +15,48 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: launcher.py:51 msgid "Starting..." msgstr "Démarrage..." -#: launcher.py:56 launcher.py:57 msgid "Could not set up PostgreSQL..." msgstr "Impossible de lancer PostgreSQL..." -#: launcher.py:68 msgid "Launch without loading the module" msgstr "Lancer sans charger le module " -#: launcher.py:73 msgid "Search for update" msgstr "Rechercher les mises à jour" -#: launcher.py:73 msgid "Checking for update..." msgstr "Recherche de mise à jour..." -#: launcher.py:89 msgid "A new version is available !" msgstr "Une nouvelle version est disponible !" -#: launcher.py:93 msgid "Update ? [Y/n] " msgstr "Mettre à jour ? [O/n]" -#: launcher.py:97 msgid "Downloading..." msgstr "Téléchargement..." -#: launcher.py:108 msgid "Tuxbot is up to date" msgstr "Tuxbot est à jour" -#: bot.py:52 msgid "Failed to load extension : " msgstr "Impossible de charger l'extension : " -#: bot.py:59 +msgid "Extension loaded successfully : " +msgstr "Extension chargée avec succes : " + msgid "This command cannot be used in private messages." msgstr "Cette commande ne peut pas être utilisée en message privé." -#: bot.py:63 msgid "Sorry. This command is disabled and cannot be used." msgstr "Désolé mais cette commande est désactivée." 
-#: bot.py:66 msgid "In " msgstr "Dans " -#: bot.py:77 msgid "Ready:" msgstr "Prêt :" diff --git a/launcher.py b/launcher.py deleted file mode 100644 index 59d5240..0000000 --- a/launcher.py +++ /dev/null @@ -1,116 +0,0 @@ -try: - import config - from cogs.utils.lang import Texts -except ModuleNotFoundError: - import extras.first_run - -import contextlib -import logging -import socket -import sys - -import click -import git -import requests - -from bot import TuxBot -from cogs.utils.database import Database - - -@contextlib.contextmanager -def setup_logging(): - try: - logging.getLogger('discord').setLevel(logging.INFO) - logging.getLogger('discord.http').setLevel(logging.WARNING) - - log = logging.getLogger() - log.setLevel(logging.INFO) - - handler = logging.FileHandler(filename='logs/tuxbot.log', - encoding='utf-8', mode='w') - fmt = logging.Formatter('[{levelname:<7}] [{asctime}]' - ' {name}: {message}', - '%Y-%m-%d %H:%M:%S', style='{') - - handler.setFormatter(fmt) - log.addHandler(handler) - - yield - finally: - handlers = log.handlers[:] - for handler in handlers: - handler.close() - log.removeHandler(handler) - - -def run_bot(unload: list = []): - log = logging.getLogger() - - print(Texts().get('Starting...')) - - try: - database = Database(config) - except socket.gaierror: - click.echo(Texts().get("Could not set up PostgreSQL..."), - file=sys.stderr) - log.exception(Texts().get("Could not set up PostgreSQL...")) - return - - bot = TuxBot(unload, database) - bot.run() - - -@click.command() -@click.option('-d', '--unload', multiple=True, type=str, - help=Texts().get("Launch without loading the module")) -@click.option('-u', '--update', is_flag=True, - help=Texts().get("Search for update")) -def main(**kwargs): - if kwargs.get('update'): - _update() - - with setup_logging(): - run_bot(kwargs.get('unload')) - - -@click.option('-d', '--update', is_flag=True, - help=Texts().get("Search for update")) -def _update(): - print(Texts().get("Checking for update...")) - - local = git.Repo(search_parent_directories=True) - current = local.head.object.hexsha - - gitea = 'https://git.gnous.eu/api/v1/' - origin = requests.get(gitea + 'repos/gnouseu/tuxbot-bot/branches/rewrite') - last = origin.json().get('commit').get('id') - - if current != last: - print(Texts().get("A new version is available !")) - check = None - - while check not in ['', 'y', 'n', 'o']: - check = input(Texts().get("Update ? 
[Y/n] ")).lower().strip() - print(check) - - if check in ['y', '', 'o']: - print(Texts().get("Downloading...")) - - origin = git.Repo(search_parent_directories=True) \ - .remotes['origin'] - origin.pull() - - with setup_logging(): - run_bot() - else: - with setup_logging(): - run_bot() - else: - print(Texts().get("Tuxbot is up to date") + '\n') - - with setup_logging(): - run_bot() - - -if __name__ == '__main__': - main() diff --git a/migrate.py b/migrate.py new file mode 100644 index 0000000..be1987b --- /dev/null +++ b/migrate.py @@ -0,0 +1,7 @@ +from cogs.utils.models import * +from cogs.utils.config import Config +from cogs.utils.database import Database + +database = Database(Config("config.cfg")) + +Base.metadata.create_all(database.engine) diff --git a/output.txt b/output.txt deleted file mode 100644 index 7002623..0000000 --- a/output.txt +++ /dev/null @@ -1 +0,0 @@ -[>, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >, >] \ No newline at end of file 
diff --git a/prefixes.json b/prefixes.json deleted file mode 100644 index 38c440e..0000000 --- a/prefixes.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "280805240977227776": [ - "b!!" - ], - "303633056944881686": [ - "b!!" - ], - "336642139381301249": [ - "b!!" - ] -} diff --git a/requirements.txt b/requirements.txt index 7adfa7c..6f86e00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,9 @@ -humanize discord.py[voice] jishaku -lxml -click -asyncpg -sqlalchemy gitpython -requests +sqlalchemy +psycopg2 +configparser psutil -werkzeug tcp_latency -yarl +i18n diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..8bf2063 --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,76 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/romain/gnousEU/tuxbot-bot/venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + if [ "x(venv) " != x ] ; then + PS1="(venv) ${PS1:-}" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r +fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..248f5fa --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . +# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. 
+deactivate nondestructive + +setenv VIRTUAL_ENV "/home/romain/gnousEU/tuxbot-bot/venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("venv" != "") then + set env_name = "venv" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish new file mode 100644 index 0000000..f64cc0a --- /dev/null +++ b/venv/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/romain/gnousEU/tuxbot-bot/venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(venv) " + printf "%s%s" "(venv) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/venv/bin/chardetect b/venv/bin/chardetect new file mode 100755 index 0000000..f486b86 --- /dev/null +++ b/venv/bin/chardetect @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from chardet.cli.chardetect import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/easy_install b/venv/bin/easy_install new file mode 100755 index 0000000..1508e0e --- /dev/null +++ b/venv/bin/easy_install @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/easy_install-3.7 b/venv/bin/easy_install-3.7 new file mode 100755 index 0000000..1508e0e --- /dev/null +++ b/venv/bin/easy_install-3.7 @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/import_expression b/venv/bin/import_expression new file mode 100755 index 0000000..0c9e976 --- /dev/null +++ b/venv/bin/import_expression @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from import_expression.__main__ import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100755 index 0000000..abdf7ef --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal.main import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100755 index 0000000..abdf7ef --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal.main import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.7 b/venv/bin/pip3.7 new file mode 100755 index 0000000..abdf7ef --- /dev/null +++ b/venv/bin/pip3.7 @@ -0,0 +1,10 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal.main import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pybabel b/venv/bin/pybabel new file mode 100755 index 0000000..b610b42 --- /dev/null +++ b/venv/bin/pybabel @@ -0,0 +1,8 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys +from babel.messages.frontend import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python new file mode 120000 index 0000000..940bee3 --- /dev/null +++ b/venv/bin/python @@ -0,0 +1 
@@ +python3.7 \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 new file mode 120000 index 0000000..940bee3 --- /dev/null +++ b/venv/bin/python3 @@ -0,0 +1 @@ +python3.7 \ No newline at end of file diff --git a/venv/bin/python3.7 b/venv/bin/python3.7 new file mode 120000 index 0000000..1a3b0c3 --- /dev/null +++ b/venv/bin/python3.7 @@ -0,0 +1 @@ +/usr/local/bin/python3.7 \ No newline at end of file diff --git a/venv/bin/tcp-latency b/venv/bin/tcp-latency new file mode 100755 index 0000000..d4e214f --- /dev/null +++ b/venv/bin/tcp-latency @@ -0,0 +1,8 @@ +#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7 +# -*- coding: utf-8 -*- +import re +import sys +from tcp_latency.tcp_latency import _main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(_main()) diff --git a/venv/lib/python3.7/site-packages/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 b/venv/lib/python3.7/site-packages/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 new file mode 100755 index 0000000..a74aa90 Binary files /dev/null and b/venv/lib/python3.7/site-packages/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 differ diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/LICENSE b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/LICENSE new file mode 100644 index 0000000..10722cc --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2013-2019 by the Babel Team, see AUTHORS for more information. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + 3. The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/METADATA b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/METADATA new file mode 100644 index 0000000..4dc8411 --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/METADATA @@ -0,0 +1,31 @@ +Metadata-Version: 2.1 +Name: Babel +Version: 2.7.0 +Summary: Internationalization utilities +Home-page: http://babel.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Requires-Dist: pytz (>=2015.7) + +A collection of tools for internationalizing Python applications. + + diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/RECORD b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/RECORD new file mode 100644 index 0000000..278f241 --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/RECORD @@ -0,0 +1,815 @@ +../../../bin/pybabel,sha256=X97VAhJYxnLuo8P_afP6cjXSHxOVROguRI628oADXjA,256 +Babel-2.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Babel-2.7.0.dist-info/LICENSE,sha256=Wg311G6MsUvV1MLNdzOLIvaB_oi9NCNmJrGjrjtQEBo,1451 +Babel-2.7.0.dist-info/METADATA,sha256=Sakpsddm3zC_mjuf8Xl2jlGr9l6fN3QH5TZOJCKBxbs,1223 +Babel-2.7.0.dist-info/RECORD,, +Babel-2.7.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 +Babel-2.7.0.dist-info/entry_points.txt,sha256=dyIkorJhQj3IvTvmMylr1wEzW7vfxTw5RTOWa8zoqh0,764 +Babel-2.7.0.dist-info/top_level.txt,sha256=mQO3vNkqlcYs_xRaL5EpRIy1IRjMp4N9_vdwmiemPXo,6 +babel/__init__.py,sha256=Z867BI00sB-W9VXa74LciJmUXnLcDE8HYet2sPuwwwA,714 +babel/__pycache__/__init__.cpython-37.pyc,, +babel/__pycache__/_compat.cpython-37.pyc,, +babel/__pycache__/core.cpython-37.pyc,, +babel/__pycache__/dates.cpython-37.pyc,, +babel/__pycache__/languages.cpython-37.pyc,, +babel/__pycache__/lists.cpython-37.pyc,, +babel/__pycache__/localedata.cpython-37.pyc,, +babel/__pycache__/numbers.cpython-37.pyc,, +babel/__pycache__/plural.cpython-37.pyc,, +babel/__pycache__/support.cpython-37.pyc,, +babel/__pycache__/units.cpython-37.pyc,, +babel/__pycache__/util.cpython-37.pyc,, +babel/_compat.py,sha256=DHx6vQR-LazZlNdeBE7wGTOBv1_1HWbRboKOkX76TiY,1685 +babel/core.py,sha256=fkxYTtAryVBEwsfYg78Y-eRfq_dRkzSFlMSXoDoliSM,36907 +babel/dates.py,sha256=LLBcjpOs_8Ng9StJdk-HcUxUnrTTtLtKYKOdLzxGJYU,67479 +babel/global.dat,sha256=h29NSbHmHUP3FulyRoiU6aDSj7FgcGBZyc212lnUL_c,253062 +babel/languages.py,sha256=UmLTj4Nai3kQrwHX6jptehVLeAw-KAdxcmcp2iDlgvI,2743 +babel/lists.py,sha256=nON3qfMoLLap0YTTRGBYWbwekBFknIABbulnsX70lrk,2719 
+babel/locale-data/af.dat,sha256=n7zhDTHDXh8voszXVJXnIaLC_ihEYStNBQdNZC_TKYE,167016 +babel/locale-data/af_NA.dat,sha256=SCCik7H53r08j9VR2MlOEPFagN5P153ef0fbsIFwsvY,1425 +babel/locale-data/af_ZA.dat,sha256=TvoOI0O5FP8QqIwI506xwhymtW-ZwtnGSh7OGg8W69s,626 +babel/locale-data/agq.dat,sha256=K7_PXOHrOyQBaZgHpJFpsI7DyOoOgxij1TmGPvHW3r8,17399 +babel/locale-data/agq_CM.dat,sha256=gn5wN9w6lKBCTkuKLwKCFrnkSAxA5Utr1N9py-ciqkc,627 +babel/locale-data/ak.dat,sha256=TI9PGam0sIfflHMh1jbrZ9hKpRVfcZVLC1rGyFFTZh8,15911 +babel/locale-data/ak_GH.dat,sha256=iTuDrca96IQYXDwnpNiqKtGn8d83kgs-LsWTfJntS4s,607 +babel/locale-data/am.dat,sha256=lT1XWTPuNzjyYF_VTyqhmCGAY7upVMPUlD70ebWnqcw,198396 +babel/locale-data/am_ET.dat,sha256=8zdStIXJmVpgppSVpZUwV04WJ4kUWQwFWRVb6AQZahY,626 +babel/locale-data/ar.dat,sha256=8ClpxZdrXplVALQ7UzQRwmmEbJMo5nSH1wNzQVfXBr8,335041 +babel/locale-data/ar_001.dat,sha256=gmM0xCrg2w8yy4Xh8viSHsnitl6HnuuDMv1yRdaE9Us,1698 +babel/locale-data/ar_AE.dat,sha256=0HjSYklVuIAiTVYB-qEzvdHa2jpX0d1bOxoNhPORbO0,1056 +babel/locale-data/ar_BH.dat,sha256=Yt0X2Col8Ha78QaBs5_H0FDllYTwUeGSZI7VCvMplqk,669 +babel/locale-data/ar_DJ.dat,sha256=0hcv-LMM77rO6XsghzDdqbIgEXy7b3be1ENh994EWrM,647 +babel/locale-data/ar_DZ.dat,sha256=LxHWSDb5T857YqpPqJebqi4rrm08IvgvuK-B8I8B7PI,1731 +babel/locale-data/ar_EG.dat,sha256=7NhEtUCRy1NrFLHz8VXla7JSZdILpnIc65DYOD7zLYY,706 +babel/locale-data/ar_EH.dat,sha256=CNFFp1Dwz32d0TcaMjTKIYVxJovVaFLlUvf9ACV-9tQ,607 +babel/locale-data/ar_ER.dat,sha256=gRdDWAO_ViChJEPChaJcNSIX4DmMbkAMPOpKnq1QuZU,628 +babel/locale-data/ar_IL.dat,sha256=v57QG4365fJbwLF6usm9-fEuq9qJYd74nRD_gWmoTk4,1213 +babel/locale-data/ar_IQ.dat,sha256=4Xi7cAbOE20jYoFxqifGshiC5tGrXrEUCsiZGwR21kg,2354 +babel/locale-data/ar_JO.dat,sha256=LouBd_IYs070hS-CYkyqg5_Y8sE-_y6cl_TNp-lGFcU,2353 +babel/locale-data/ar_KM.dat,sha256=vvAkx4__TxagJozvyudv38VzVgIljVPBPvyMGsz11G8,1179 +babel/locale-data/ar_KW.dat,sha256=oWH9zhdJxQivmCEgKERJ8ADEobK0BqdIf_6zfUfGgkY,669 +babel/locale-data/ar_LB.dat,sha256=cKY3x_auClEkSF-O9bdUuG5bOi-X4HZcZv-7HcM_uV0,2354 +babel/locale-data/ar_LY.dat,sha256=NsVSuzMZGJg7-AN4FurzzIMh8iqQECSEPJHF6bW0nJQ,1670 +babel/locale-data/ar_MA.dat,sha256=4XxmrdYhzerIdqqgHLH55Hg9W0cMFcqHjcAlPLRNn2g,2025 +babel/locale-data/ar_MR.dat,sha256=IRZkTxOZ39eZDjDDr_h9GjzgGbS9Kylg-8QQfJAzl0Y,2191 +babel/locale-data/ar_OM.dat,sha256=SwCPf_1V4qcSfB6F3LzQo861te_t91Kk4GUX8IV_A_M,669 +babel/locale-data/ar_PS.dat,sha256=hGNnwUj0XR-x19RySMnnOgC0Kd4odVkj85gYJHXxGLg,2291 +babel/locale-data/ar_QA.dat,sha256=Ymdu7PL0gvtu7JPJ-hUyoL1eN_mHss3ZF0XzMkqrsFQ,669 +babel/locale-data/ar_SA.dat,sha256=MxBfIMX4A3Na-0pYAK868hW35JMWu5msIVsF8PRWF0c,1906 +babel/locale-data/ar_SD.dat,sha256=nl5A-ub3QRuVC94h7X9Ak3gG9nrtylVXPh3O7Pb9PvY,669 +babel/locale-data/ar_SO.dat,sha256=5dbXvnXAyzeXNoRifm6dn9iHT3uR1-sERZJvw0xrdAw,626 +babel/locale-data/ar_SS.dat,sha256=xeUbi35nGVXcOa1H9b97Qx0GhQGmzbQmuyQwhpP7dFs,649 +babel/locale-data/ar_SY.dat,sha256=bHN5TGlFMD7H2qIWGO_5GCLjtPm2z5u6pLDLPTksCWo,2353 +babel/locale-data/ar_TD.dat,sha256=NbYxtvmfDFCgyukJrfEluSKqSvxGkXkFdVjbowqUUmM,607 +babel/locale-data/ar_TN.dat,sha256=utHPqirZxiuEj86xGgZwtv50o4sqavbD-pCLA02EkC0,1669 +babel/locale-data/ar_YE.dat,sha256=ugcwooc31b9YRfoeVUihy87zQGHbpe3Pcp_GzSmYk8Y,669 +babel/locale-data/as.dat,sha256=v0f4nBxj3fZPic9BPsPp4w61wfdUWeyrn0fZjyfNWTg,230479 +babel/locale-data/as_IN.dat,sha256=_1h7wmRt7evqXyqFIKyo7Ppjclhn5w9rMtLEZG7Fdgc,649 +babel/locale-data/asa.dat,sha256=uX2LqOD_JuPjax_NYcVVCrXUCKAzA0xAve7S6ZzfeY4,16240 +babel/locale-data/asa_TZ.dat,sha256=Rfjx5YBakJ7TNX3X4uETtuArOF40ftdEonNMAwVNRzs,608 
+babel/locale-data/ast.dat,sha256=RiZhHO8GKLb6a6dSEdV5LLGpLzzfhNz0XMzHpgB5_Es,210339 +babel/locale-data/ast_ES.dat,sha256=-iocl3j5ah7FgqKU9XQR-adJKP8bGsBMLBFrbg8NjbY,645 +babel/locale-data/az.dat,sha256=QWl1YVyX7U_hMfum9QuBZBz4zB3NXNEYbuuJC6Ja-P4,191875 +babel/locale-data/az_Cyrl.dat,sha256=nPqsNWZo8xrNUv-N8zko_9Ve5pQ9WIDMdeI95CytiPs,38954 +babel/locale-data/az_Cyrl_AZ.dat,sha256=0C_Y00OpozgdcwgbDOGs2R67PQ0TBF1bMz4ovHLmZ8o,626 +babel/locale-data/az_Latn.dat,sha256=7wrR4YWwEpz672DsTtFe7BItp0oMldPoRHnDT4yP38g,2246 +babel/locale-data/az_Latn_AZ.dat,sha256=MxhS4BIHuziBc2dKVrCvBHWw-zt-ZRqb_mc7QyRegm4,626 +babel/locale-data/bas.dat,sha256=O9JPLvooFeB-9j1_1l_ceoknZUVVG7aTxXf0Pud4Xss,17184 +babel/locale-data/bas_CM.dat,sha256=6roi5oBt9ByVAnbFcedUcRg2HmkLI7m_gL8L8Xx18II,627 +babel/locale-data/be.dat,sha256=H_LUM0cEwMyAkBnw0UvBLHTR-mYnJxv9pXOjYsLZrUE,258410 +babel/locale-data/be_BY.dat,sha256=GTDO3Za8gEdwa1UabcPJ32n-rSJMvRfzBA_sh0Mv19c,626 +babel/locale-data/bem.dat,sha256=-_tBxlhspwM9kCqBwWO7BtjzMW-lA53eZ2omKfErUng,6555 +babel/locale-data/bem_ZM.dat,sha256=d2G4WKDe_WNPgAv1JxwRp4vsMfyIGdgBfj5xhy3E5MY,608 +babel/locale-data/bez.dat,sha256=bOuSiU51Cl-TV2XlIdyvBaRSvWrua_6FJSf0Yd3bT4w,17031 +babel/locale-data/bez_TZ.dat,sha256=pPQxCLShiJXgsePTeWi_2VtDRgjIorpbw2Ab-43Pvfw,608 +babel/locale-data/bg.dat,sha256=UJAqU4JVTF_CqUzNFPOpGadCFUe8YoG_-GZJ_wCwygs,228458 +babel/locale-data/bg_BG.dat,sha256=S98UqeOmRzTvOPoQeaXthy7N_0-ZCi3g3qScIOkGPxE,644 +babel/locale-data/bm.dat,sha256=UvSMvGOkaTuc1xClsfD0-6WaTGswrRKfYq89SzD8imQ,15938 +babel/locale-data/bm_ML.dat,sha256=SYWVELAdIQjQ3lBuH6pTth6iAqJNNaIx7BcxGiVwnos,607 +babel/locale-data/bn.dat,sha256=78q3cks0mrxGtG7Ap3UAHakz6y9ADBlJVF2is2BWX5c,257002 +babel/locale-data/bn_BD.dat,sha256=MwMquIkmom46hTd8CKQqNjq0h9oIT6kWmj-rTvLDXsM,626 +babel/locale-data/bn_IN.dat,sha256=r2TxDmuVgf4o9O7Cf4A1b5C-V9KwuY-0P_l8tAd-sKA,884 +babel/locale-data/bo.dat,sha256=3dtXet1Kn30N9c7_IKIVXY-Y2i7Hy9pWsIp262awTMg,22546 +babel/locale-data/bo_CN.dat,sha256=MvO2V4RU2DlFEFgbOw-hjWd6urYl307-WshKY3rlTfY,626 +babel/locale-data/bo_IN.dat,sha256=IORPH-j_pfMPkLapwgn1WpiXBz0Ww14YpwqWlbPzrps,722 +babel/locale-data/br.dat,sha256=Efh7GVXZ7TseoExgtaItATT6JYDbCaCURZTsDmXOEWM,252311 +babel/locale-data/br_FR.dat,sha256=YpLV2DDbxqxCSqEIeARiZFN93Zdm-sIvK1WbZSFaDfE,644 +babel/locale-data/brx.dat,sha256=7B-M7g9qrHT2PDvZq-6xTP898ngNpqH-qwiB3JvY8gA,124250 +babel/locale-data/brx_IN.dat,sha256=bwkbIeGZgMgv1gqFuvtEcg8IaUw8fU8386EX9NxwazA,650 +babel/locale-data/bs.dat,sha256=xA5-CHy_ckC36r_0fjX6F8tGkwtVvr5Q3fPs1Af1KOk,236345 +babel/locale-data/bs_Cyrl.dat,sha256=qpMX1PaPmMJl2MhacaXdE8cy1q3vPVvlIQRsKCNj6lk,191220 +babel/locale-data/bs_Cyrl_BA.dat,sha256=BgEXeYz5aTttgFbOjVxMt8iVwiiYfUW2K-sCsjqcqrs,626 +babel/locale-data/bs_Latn.dat,sha256=uwk-ewWX9CKMAfLwY_vXGjHF_g4Wr9DrpESqKQxUmtc,1978 +babel/locale-data/bs_Latn_BA.dat,sha256=edblc-NRnO8KuX3JVgKLz_gnAwYe2j01L3iORx-8IW4,626 +babel/locale-data/ca.dat,sha256=VkbK3W_WAdeKSNAPreqIHHehmkXa29lio6HUuFchyas,205061 +babel/locale-data/ca_AD.dat,sha256=bNYq_kpURgNvLK9gIo8oTycE1bPkGYTKMqa6JKHaLkM,644 +babel/locale-data/ca_ES.dat,sha256=B0ToeaEZp_47D_jlPmegrl2nux7jJdhTjERBHAllQ5s,644 +babel/locale-data/ca_ES_VALENCIA.dat,sha256=faT9vchH9xCQ3oY6aR2shGI-nra3S2BHllCzqcvI1Xg,3694 +babel/locale-data/ca_FR.dat,sha256=JxBZKaUKWLHNi01OliUAwDTlDAob7flGt2J_5gYaZRU,663 +babel/locale-data/ca_IT.dat,sha256=F-cytzBQLU3eQ7d-eqwL5FBwUsyES-w7IenrpJfRGKU,644 +babel/locale-data/ccp.dat,sha256=D-Z3zFzkvAtx4TFn94QvpQXfbzVSE3KNWIa7HRCCvJs,275311 
+babel/locale-data/ccp_BD.dat,sha256=X4bHc5AxNakoqr-IIiW5pXlshq1rkhmd4j0vASONKCY,627 +babel/locale-data/ccp_IN.dat,sha256=CkS-g4E_3UpAWPJjcfSgLLlcyuD27IU-Qm4IWbAwj20,650 +babel/locale-data/ce.dat,sha256=gwy0HT8b1m8DmGvv5a8wee3bFCNEYbDa0pwxlmJaonY,138724 +babel/locale-data/ce_RU.dat,sha256=rKRXXvqaIlC_Tfa5SF9R6HucBA-8xKjx2A0rYkq6ico,644 +babel/locale-data/ceb.dat,sha256=YwpY0r-7M6KN1YmcaLW7SgFSdx1yFp52mVqibrVQ4cs,14214 +babel/locale-data/ceb_PH.dat,sha256=IIz-5_7M1McMnHJN34FagjNAKNFeyvgDIHo2RC0qaJo,627 +babel/locale-data/cgg.dat,sha256=5Yi9LIsGO_eigtjnod_HlXYMFk7f_GyahSbPgM2yHkI,16281 +babel/locale-data/cgg_UG.dat,sha256=29oSxuO0qn15ASh7buIKuXO7mY8l0CCjXPlLeTtqBGY,608 +babel/locale-data/chr.dat,sha256=xXmjvaz3o4oorx1Mz8mSYeR0mY1U7m6Kk9FaIdIdK34,196079 +babel/locale-data/chr_US.dat,sha256=j11CnbcC2COjt8xpooqGlEaMuY7HogdiNJLxKfeBvTU,645 +babel/locale-data/ckb.dat,sha256=RkC398fj4MV-J3h5gXZIVvx02o-QNVoLejjxegf6QUY,41589 +babel/locale-data/ckb_IQ.dat,sha256=aFZTJDR-waJrFCJrO2ugN4TRok8PWQ3gcEmdV6O_T7U,670 +babel/locale-data/ckb_IR.dat,sha256=yrnqsVJoq9LsKcSg_ohDFgHkWBjxYrR24hlZOUSgk94,1222 +babel/locale-data/cs.dat,sha256=jh-2Zl-BZDY4XdLcLQkUbirc0T3LoLD43EY63kfd-fo,287652 +babel/locale-data/cs_CZ.dat,sha256=Imyqwx45p8yWaflAiltP7ccTxXK0F3potP7N2RTbv7s,644 +babel/locale-data/cu.dat,sha256=rPG2xcL0oE-YmO73cZTNqfj0yY4mIKV2_58uWin5cXE,20286 +babel/locale-data/cu_RU.dat,sha256=LkEMZZAlLI59QsMHcLj_yE9c7vvXGnFE1FSvnkgZkwE,644 +babel/locale-data/cy.dat,sha256=eWrgesXXY9RM0NtaNKPdVocHizrtfpt1nKJFAawESl0,321884 +babel/locale-data/cy_GB.dat,sha256=rCCg3lvwpJORx7LgBBF2dWsd6VnYybbrA2Na7gu7Q_I,644 +babel/locale-data/da.dat,sha256=lE_A0Soj5f717xbJRW-885u210Qb80YafUnsIJ95y4U,197581 +babel/locale-data/da_DK.dat,sha256=vP5m0YY7VSUJcHy4fJHTaaYnJFWKEv0N80rUCEd7pPw,644 +babel/locale-data/da_GL.dat,sha256=OodsMwjqjnqXSimIZCOmUWDLLiIR9vvRj0imLbqx-mo,607 +babel/locale-data/dav.dat,sha256=A8cH1vXScTUFVf2lRV0hMXQ957TF3vFcfxdejDO0hIo,16323 +babel/locale-data/dav_KE.dat,sha256=tcXkL0o3AyPNTHJy5FlubLlhfHI6XzDwSK3lXSJMws4,627 +babel/locale-data/de.dat,sha256=qIRVbYLDdmpybehjkMIM3VGlNVFPpUk9x5J1mIE4PUQ,197068 +babel/locale-data/de_AT.dat,sha256=32JpQ4PJkEsB0dlBCz4BOqrFV6ANrXeYZivIGtw55OI,2581 +babel/locale-data/de_BE.dat,sha256=pubC4Po4M_ZKPiT6EBNPMqi5TT9p_v-cf4V624OX2GE,644 +babel/locale-data/de_CH.dat,sha256=DsfawA4hefcW9wyxOmvvkcmOdVQr4CivD37so9Bk_AY,4023 +babel/locale-data/de_DE.dat,sha256=2ON8LeGdw4j5qNt9W0QKYTxxTZMTnAMR7pTw6qnwoHg,644 +babel/locale-data/de_IT.dat,sha256=7adTDuD3BT7FDRebEbAq8VDJPVof1zhCfSTPFd0Ojm0,1637 +babel/locale-data/de_LI.dat,sha256=cgNDay03PtZnrNp06QeLFXcLdsA8ObWQy9pW_NFepj8,1339 +babel/locale-data/de_LU.dat,sha256=CQs6M5llCpctge33i5uLbxUzxTgNyuXa-izDdHwIVLg,1083 +babel/locale-data/dje.dat,sha256=agpAQ8Rcw1YbFLLtsKktAqSazx2zpq8QpCNrHX7v2MI,16245 +babel/locale-data/dje_NE.dat,sha256=RuHsfFP4RVlgcjI8SNx2Fu6TLMbxKkHMNcT-oiJIMOM,608 +babel/locale-data/dsb.dat,sha256=kozmD1hEZMJgcGpEkRHvaYP2Qr8CJFI85L8fVIAu8hM,179612 +babel/locale-data/dsb_DE.dat,sha256=7T-uUURejnX_8k9QPe_18AlJaUSEe9sA9TMmQ1wgZMQ,645 +babel/locale-data/dua.dat,sha256=bcxuGLFcYCtVFRPd5SgDkhTWbuqkxOD3mSgpdGOY4Eg,5376 +babel/locale-data/dua_CM.dat,sha256=lIrnKrSo0EoxlBu9GBhXq510jzSkiEsjqqUqC_995XE,627 +babel/locale-data/dyo.dat,sha256=PnfWJWkoPW-ejx4amchVKh7Fgj0QUhQZRKunyHtTNxc,10562 +babel/locale-data/dyo_SN.dat,sha256=-x1xQXODFTNEgMz82mei0ikGCKCtFqaxU_NfECwqST4,608 +babel/locale-data/dz.dat,sha256=tJZ6GIwTI1Bdbu58kL1EkSXljEaKeK4m4izG2a9ILV8,89987 +babel/locale-data/dz_BT.dat,sha256=VdAHDYTPNqFMnYGYbHo576XYFPG2UF384aLrehyaqts,626 
+babel/locale-data/ebu.dat,sha256=HbHtkTdQzr5YZAkDkcFVHLrgjvX666F3RtNfS0Pp8jY,16295 +babel/locale-data/ebu_KE.dat,sha256=cTzjMTyTnaPHdpt23aNKauKqMbsLluKDRAtqLlEcHdM,627 +babel/locale-data/ee.dat,sha256=sbuNGrchVAjxZSubXsHsHsZhHnxDF6EQvlUsBituVeY,142502 +babel/locale-data/ee_GH.dat,sha256=wQJ8oXvauwAl57jr4kCI8Lbab1x5TkDHXekT4DFthso,607 +babel/locale-data/ee_TG.dat,sha256=TM0UYyUrl-Bmt1jlWRIF5bV2I2VE66u05sKuR8FGEFY,1159 +babel/locale-data/el.dat,sha256=p_pvI3tl89K1INhijzoF7kqTS-3O9G1E6ZJbrzFxxok,238804 +babel/locale-data/el_CY.dat,sha256=VD2Go4Sc1zQVjMJtLGvnmuF5OPEIeywn2Wwf4H6Fdjk,626 +babel/locale-data/el_GR.dat,sha256=zCFaaWmChFq2CF-AKZwRnBVUrM7g3sLNzZnJwrSIIZI,644 +babel/locale-data/en.dat,sha256=wX1t7tIVoUaqjyZItj-cKAaXrfqsCGL7ECdLhTSIj-A,188908 +babel/locale-data/en_001.dat,sha256=D2FJZ32li6xFhpO-R9idEpvtufrPSiN4ek0LRdzYTu0,22984 +babel/locale-data/en_150.dat,sha256=qTgjmREIoEc2Tb0aLkbDyjAzCKmCT8TVzcCpLX-Ryy8,1783 +babel/locale-data/en_AE.dat,sha256=CN_hxkBYRCd-a9Bz3FlCK-EFnZQK_toPEIc4XFZXiEw,4132 +babel/locale-data/en_AG.dat,sha256=1DmA0Xy8J-0SnJ1Q6v4esqx9Pefv-HSW78yVFJmrj9U,645 +babel/locale-data/en_AI.dat,sha256=bhCQZ-Z5tOZRQW3PTA1rUs1sQGjGEbKximNzwVSchG4,1197 +babel/locale-data/en_AS.dat,sha256=_SIxul_ScgcJ-XykW7DL3KHVNv64MupuobtohitMP4c,626 +babel/locale-data/en_AT.dat,sha256=0SVp9Ppii1u4xW0iw5g23pZgoOf5oGK25-irBqFVMns,1218 +babel/locale-data/en_AU.dat,sha256=sWiCpDVuk_oH7fZe2a8dyMYEy8anAaewADDb7DQAw00,19371 +babel/locale-data/en_BB.dat,sha256=baZlkeds3JOu-3UKCF8ujDV4rvfXYlaKgJ4Dc1fXqiI,626 +babel/locale-data/en_BE.dat,sha256=a7P2EyuIaAjbXY7j58nq31PwGuZf1lYRFnojFYj_OMI,1511 +babel/locale-data/en_BI.dat,sha256=bkq4k2bBo_E3A8wwX7QEnZkU5LtQdTrMk2-QQ4imhMw,1180 +babel/locale-data/en_BM.dat,sha256=X6PvUgbDx6OwHMalkjAXDd0lydkRBsSBrZqNl6UVNoc,645 +babel/locale-data/en_BS.dat,sha256=9TrCiUQfxHHx7g-yz-_GI2klqVu2g252j0LuXaktYRo,829 +babel/locale-data/en_BW.dat,sha256=X-9K41JjPlc41gmxcpaodMoIwXn5fa96bQYbJqv-R_U,2789 +babel/locale-data/en_BZ.dat,sha256=eWzZruSKi_tgKUOc2rXUdxwNefFNhkLWXEan5uKgYI8,2968 +babel/locale-data/en_CA.dat,sha256=fjrU_3issiPH6OyYP7McT5ksLmvajnuZ-E1MeTGuN5o,23633 +babel/locale-data/en_CC.dat,sha256=B5QeLVVvHi6KfCjRyhNIOqSCQOUZAqw2oXjAWJhgRnA,1178 +babel/locale-data/en_CH.dat,sha256=3sFUD9lRJQzMY-3aEbRem9G11PSz5qK405qUUSAIJgA,1118 +babel/locale-data/en_CK.dat,sha256=sK0edWgfCMJRg0cwNPMEybw4r3xMqvpWEqndFBiWNvw,1178 +babel/locale-data/en_CM.dat,sha256=6bqtyCDo46KZ9i4uwBJFM0ycKp5xMfKM1k2otywINNk,1428 +babel/locale-data/en_CX.dat,sha256=aBkfLVs1UrWVD6v5uwBtSWBPVV4laI_FJqzutMfgG9s,1178 +babel/locale-data/en_CY.dat,sha256=ccjHkNGpGSiwfPpVuQ6i-bP6V1DUzgl-uZkksWdgSY8,626 +babel/locale-data/en_DE.dat,sha256=VEBAVN7p29z-qYL4NfxSToa6NoODpWBrE2Ubt7lzrdQ,970 +babel/locale-data/en_DG.dat,sha256=MwocxJMOwXkvTQWxNUWVrJJ0aoodMD_ynLWZmJw1bQc,1159 +babel/locale-data/en_DK.dat,sha256=5FZcz_4YOiccWFgO78b17TMTraw3jHBQxZ9KGOFGPKo,2368 +babel/locale-data/en_DM.dat,sha256=6Rm5kjxUpw1DEtfbgMyvqxGc3adeO1k3-d1m2Hmmpwg,645 +babel/locale-data/en_ER.dat,sha256=txc2R0EnMpsjnCYuYvwmufbkFsZoAICHle1dqc9BffE,878 +babel/locale-data/en_FI.dat,sha256=ZzcResQZZ2FjJ_mibuL3uUNoko6y7eLBmnL-csiHl2A,2300 +babel/locale-data/en_FJ.dat,sha256=AkxJVXEQLA1Ua0DngWALCkrXqrUubqYV19gSTpzSz4g,663 +babel/locale-data/en_FK.dat,sha256=DyyvgOqM5DTTLJd_q4p3LxhnoGDiJTIQGKGNxhmIou8,1201 +babel/locale-data/en_FM.dat,sha256=H--72gqzWZz5eW4lUNxBsNBxCln8004Xx65RlJ2S2lA,607 +babel/locale-data/en_GB.dat,sha256=97zYSLKA0m4v7P-V_TFSN_UKyIUKDZAjTftemDkO-oc,4341 
+babel/locale-data/en_GD.dat,sha256=bgvDAPFq8l7HTa1kE5IzpVU29YsGZkEmmqyIsSRCJQU,626 +babel/locale-data/en_GG.dat,sha256=ifKq9gm5rf9YNecYWdijPzNYoI_Qf1dFozvQIhDR8Y0,1264 +babel/locale-data/en_GH.dat,sha256=hy1ltceBe1aTrg3z-DHmfB-bnAfWYpEA1OEZ5gsyql4,880 +babel/locale-data/en_GI.dat,sha256=1Iy-rJ22TireT4CzQfqaqi3a202-tCvmb7zdJ4314is,1219 +babel/locale-data/en_GM.dat,sha256=z2DFG5DR5B8RtTTIsVwZATy0S8rv6zAWLyRscQd3k8w,876 +babel/locale-data/en_GU.dat,sha256=bMtawZxXvjc5yRXtQXrjZPTX4ZCMpnNF4CVS_1F9K5w,706 +babel/locale-data/en_GY.dat,sha256=BqU6DCTro_M6hKRhOa9m0hyBDkpP0sB8RfHbDP2qrVo,685 +babel/locale-data/en_HK.dat,sha256=ECm3X9TpRz3BRiZzS5PE9PPWavfqzmIOtxKHWJViFl4,2041 +babel/locale-data/en_IE.dat,sha256=2lBAkPxrSoaj7bZJk8cji60rG8vvmccyjGpqDhpnBTw,2061 +babel/locale-data/en_IL.dat,sha256=AX8nfaJRv06sLvsAwjd_6T-4Bq3bsUX2t09IbslQuh0,1415 +babel/locale-data/en_IM.dat,sha256=H0aqDWIDNMcvtiWVlyCxLrUuKck6W4Z7PSaFTBgZN4w,1264 +babel/locale-data/en_IN.dat,sha256=V9VhRVn8_5Gxz-flT_wSPUXB-ZeQvrxkHQ054fe70I4,2935 +babel/locale-data/en_IO.dat,sha256=XHbYlZcZAPiASCNOgblEjjfEteKK1t_yMs5KuH7mbXw,1159 +babel/locale-data/en_JE.dat,sha256=mTXSvtZBCgkwSDw_98BlFoHt4t8HmT8LpG4EwyLxFXg,1264 +babel/locale-data/en_JM.dat,sha256=bkStMWRlXL3avU9nP8SqLr3RgexXFf2759B-ddWpYSU,1617 +babel/locale-data/en_KE.dat,sha256=zIumuglOQkXXw6UBWKjYVsM4Xh9ofXtjzSSELyG0dek,1449 +babel/locale-data/en_KI.dat,sha256=yzrJdQnlYEqBLzn0_lhvdvwLgO8PNu7amY049pRWNvY,626 +babel/locale-data/en_KN.dat,sha256=Okq8Q5sfYpMAFO1mWI0zrnt5T1HOIS0RsizwmOvDnvg,626 +babel/locale-data/en_KY.dat,sha256=6OZST-EB0kOfsSXlH3Lf50yIDUXEqb3Tv4pt41bHCcI,810 +babel/locale-data/en_LC.dat,sha256=uT6fq9Y7SonQiwTniGmVdoEiRD8V9xnG2v0JPq7_JHA,626 +babel/locale-data/en_LR.dat,sha256=3X0kSLR9P2riE7L3coz02uSB0RBvBjmGDI4xNw9IDbE,876 +babel/locale-data/en_LS.dat,sha256=C-zMOTGHLkrT9Rcy7Fb6iyqdsLVxTFEkfDBMxq1bnhA,876 +babel/locale-data/en_MG.dat,sha256=2tWGxQYUoBOnqap1IPgCbk-uEOJVohjPMwJwm3eZ-Q0,1429 +babel/locale-data/en_MH.dat,sha256=bbe847fcBp-4kzZv-XJvFWSbVz0p9RyUz11Q5BBkyrk,1359 +babel/locale-data/en_MO.dat,sha256=pL92dz8HCGnjF-wdGm8wsnouPCLFTCOE7vhstf8XpfM,821 +babel/locale-data/en_MP.dat,sha256=15WnCMndLQPYhH94O54yb39H-WI_Cbv1wOs226UjhEA,1340 +babel/locale-data/en_MS.dat,sha256=HKKSWqRV5wLDYJ8bsfptoeUmTKM4WF6uQXCCWSn4tAI,1178 +babel/locale-data/en_MT.dat,sha256=jG_r8nn4zfVenRg4NnDCE49KUwgwSZMULF1fn6-Aaxc,1945 +babel/locale-data/en_MU.dat,sha256=dTXBxkoStB11NJEfCI7Lk-aikFtQfNbzqu_AO00Dv8o,1429 +babel/locale-data/en_MW.dat,sha256=S3sP1oPcKqzi1LuD0RPS_zhHjh-QwML8N690WMu4lwE,877 +babel/locale-data/en_MY.dat,sha256=r7yQs7ToO6rVdU03_THwkvj5of3irIgsaBP7NVVa-hg,707 +babel/locale-data/en_NA.dat,sha256=TUtOUjM6gdOCCxRo1hyY2uD0FAxuAsJ5zX2ttnqi3PU,876 +babel/locale-data/en_NF.dat,sha256=3Ax90cl26hC11xws1R1BR7f3a3Aq0bir0iLJpoZyQS8,1178 +babel/locale-data/en_NG.dat,sha256=sFZNrlCMZepXl5e9keqii9Z5zFuyS16R8oseYHN7T6o,1430 +babel/locale-data/en_NL.dat,sha256=5nDlKVas8Unr6igs2LEZP-vABuwzCm_-FouqtOoi76Q,1115 +babel/locale-data/en_NR.dat,sha256=CYRxajoGhR7Q4_cLTM7e0CxZ5WvRkuIAPB78bMt-sFU,1178 +babel/locale-data/en_NU.dat,sha256=CCM0oHPRMBwNCGlNJm94krl1zyJvVCTIr4yD9XOTers,1178 +babel/locale-data/en_NZ.dat,sha256=-BiOtyCtqLAyIOwsI-IlWLi0vjoxUAOkqR9I58QLOWw,2252 +babel/locale-data/en_PG.dat,sha256=g6U0Ch-VKEDfF67z7xPDto7M21uck2BLN1fxMVdYUN4,626 +babel/locale-data/en_PH.dat,sha256=iyRv_Jzm5_mzlOEv57K5SY0F1p7Q62IEEhSwl7-zl3Y,647 +babel/locale-data/en_PK.dat,sha256=VgvTUaova5Mn2m1PH1vmTL5SY47fiF52g-0rNlFxNGE,1977 +babel/locale-data/en_PN.dat,sha256=uw5Q9dusFuS0N_WpHusewTgruprVn6rH5AmAcTqNkhA,1178 
+babel/locale-data/en_PR.dat,sha256=wiMMS_iLTFOKBnPfXBKi8axJ18gZs_AhsSX-C5xbXuc,626 +babel/locale-data/en_PW.dat,sha256=YsAyfY2ZCIxcH7vjdDhhJw0i1aZBUjnhswCmvFPTcEI,791 +babel/locale-data/en_RW.dat,sha256=SXtqSRArmZ8Ae6tRLBVKN9U2jbcj4SBMVlmJOPfFY88,1429 +babel/locale-data/en_SB.dat,sha256=TeRvJTZaVeth3jfMShL4J0uOP-c1ALqPuGz4Eay40y0,626 +babel/locale-data/en_SC.dat,sha256=lUN96sufcnMy_OfIOEl6UCRSSvo_QItmM9rrwBHWLBQ,1179 +babel/locale-data/en_SD.dat,sha256=0HivywfMrAPu5oRYBvds840J3Z6VyuucNEwLVQtYLA0,919 +babel/locale-data/en_SE.dat,sha256=Nu0YE8yE53k2b9-z2_v0psDPiBEXS3CSUj1qRyvMUoo,1445 +babel/locale-data/en_SG.dat,sha256=LT5fEP_actBmosDXIwGlfrS1aCuLJpN3ZOZA4XzVjfU,2075 +babel/locale-data/en_SH.dat,sha256=5XMQM5SJRvoOIRqaR41oTHFJExn5V7GDUX5xiWlwF4E,1201 +babel/locale-data/en_SI.dat,sha256=UjOS7UvVNnplRgs7YHYq_y3VO1mOaa989GxkXgaBMgE,986 +babel/locale-data/en_SL.dat,sha256=1rmpoVhWrAXrcs-bHLXkrg-kpDL_YZalzDuH2fqR4k4,877 +babel/locale-data/en_SS.dat,sha256=oGCDgGIjDXUK9hT1suYMfML6oHRHm313ecMk7k3nNmY,899 +babel/locale-data/en_SX.dat,sha256=9s9Gw3t7w-GJsdo6kj-1x0g2TqQQmwfjPN3HbK8UGFQ,1181 +babel/locale-data/en_SZ.dat,sha256=9Z4vBh4OBz9qkogw6-_gC5TQMhZBr88afkxbW6W9sek,876 +babel/locale-data/en_TC.dat,sha256=jSfmNR9nqY3QY3N1OV5hlpZqi1zbULjy6FoBgkp1yWw,607 +babel/locale-data/en_TK.dat,sha256=7Gy79svNx87Zoojlbh47BuKDzXGnX8MAtDZfp8lwgVs,1178 +babel/locale-data/en_TO.dat,sha256=_jEqtNA13O7OvHiS2nYxJRJkSVTEwHp0Gz8a9Ap2Qi0,627 +babel/locale-data/en_TT.dat,sha256=vckW6xKEehZYDQSnUNFc4atkBHUoYuQIHwSh2955a0w,645 +babel/locale-data/en_TV.dat,sha256=zxOKaQFXMQ-Xuop3ypoHtrkNnjJxEsBMLzY8qo06_4Q,1178 +babel/locale-data/en_TZ.dat,sha256=y1E2NWDNV24pOB07058E130ucyopZL-ES-J3BLZxf2g,1430 +babel/locale-data/en_UG.dat,sha256=YQAlu-EAk9G9yvfvTQ15JGjfHYdBcaaspXwLDlbQ44g,1430 +babel/locale-data/en_UM.dat,sha256=-dAHtvQRXZvhqFx4Ywp5ZfEmyLmmas7IoEyZqiUMBNk,644 +babel/locale-data/en_US.dat,sha256=MXmrAUts6riPcAPxycocGvrM5Pe1Yy4c1bgrMf1X1CM,644 +babel/locale-data/en_US_POSIX.dat,sha256=ItCAoaRpz7l_hVn33T09k8oO6G2zaTPLVhetlzyRu1Q,1222 +babel/locale-data/en_VC.dat,sha256=Iw2ujy8fVyq5JqWFMZpI4mV8Se5tshArasCTGBiiAYw,626 +babel/locale-data/en_VG.dat,sha256=4bSbfJa4Ce2prdCxLLgLLavOIgLzDd8SPAXOudWIGbE,607 +babel/locale-data/en_VI.dat,sha256=z93RamsD556JVbwZ4QgFpZl51wlljnQL2PaLjZz-AIU,644 +babel/locale-data/en_VU.dat,sha256=G35aw89Q2xsVnTFS4KD01XW2zejaqcEovjriXjcTh0U,627 +babel/locale-data/en_WS.dat,sha256=Mugt5NlU0-LzAyWlSsLv7yUli8JgZdwcuwnxsRiU0pU,647 +babel/locale-data/en_ZA.dat,sha256=MBf_0lOrGeM0Arv0-lAbLO3W8UR8WYdDwyhBGyWSMbY,3290 +babel/locale-data/en_ZM.dat,sha256=QF860N1xHPumZFZUTGddFIUF6SGBBzga4-QoFozR13E,876 +babel/locale-data/en_ZW.dat,sha256=yY7aWXU5ZJbBowJmIGIa5RvoanIkTHvOiiQqIp63jis,3220 +babel/locale-data/eo.dat,sha256=TMUDwM_wjcv8djKo31VYxyFR3Mt_LIiEisfBu_pqN6k,40766 +babel/locale-data/eo_001.dat,sha256=tSaGBVAG3TxoCtDe47FyjEQ5TfafFOfiLMuGo_UHtRA,841 +babel/locale-data/es.dat,sha256=VaR2n-jrrDSsyTdDayniQ3astW1gipiOHaQPQHBWEdE,194176 +babel/locale-data/es_419.dat,sha256=nUWG6Iwzuf3F3b0JTiSMPWWLrvY0rpjdoTybS2dxfYM,26732 +babel/locale-data/es_AR.dat,sha256=Ghc1MeHAXi16RC3ZKebD5d2woK4nAJQ65f4BkCBO4VQ,9240 +babel/locale-data/es_BO.dat,sha256=aQDwHqAMdFenAwv6-_r03hhEoyiDyxLZXzocEbBJfsQ,1995 +babel/locale-data/es_BR.dat,sha256=QZywQ5KZyLte_fNcqbDIygsPzmDF6Ja9As1TFzHMyL0,646 +babel/locale-data/es_BZ.dat,sha256=a7sxdDb9LL0tGBxsi1BDaAp6f5Uja3sCJUxhD3RHHHg,645 +babel/locale-data/es_CL.dat,sha256=1EN5nTveM5IM2j8yUW8Y1-9ETVwCb8zzp4OGfLQxbko,5519 +babel/locale-data/es_CO.dat,sha256=5t2IwUjWhe4ac35CmHySMf3T1h53HVwVTkU-ArphtUE,8827 
+babel/locale-data/es_CR.dat,sha256=2BdF9W_FF2r69u1RwZVZYo5mpZtSZ09VQXYLLZDMzzY,1840 +babel/locale-data/es_CU.dat,sha256=sgvidA7XSwMT_7G8aC-Wa0ORz6uo183Wi-Kqfi6MB9Q,647 +babel/locale-data/es_DO.dat,sha256=Ky4Hi1jNM_D54yZqdOHHcJo3p955IR4UqoWOcf8EoC4,4375 +babel/locale-data/es_EA.dat,sha256=KsGmYEQF3RGUBwIl6cy9rrK_w6EfBFQEumG_eUjfLs4,607 +babel/locale-data/es_EC.dat,sha256=QuO779SP9qI4P3ibEvXN85Vz5TNZwVawZ2xzSsX_ls4,3428 +babel/locale-data/es_ES.dat,sha256=h4CfqSZjQiBcat4Ail-hSnB-m-Y_NugqeBfxIgMt4bc,644 +babel/locale-data/es_GQ.dat,sha256=RQKztBLLAiTfMLf3BGg7dKptJiuAtnJnqCzDDwsNsPk,890 +babel/locale-data/es_GT.dat,sha256=9AynkT_3-f1NgQwjHLhDBuTdk1g-fEk6L-lgByM9UzM,5401 +babel/locale-data/es_HN.dat,sha256=HG_KSm5wCWirqFgpkqsblYDIP-DvG8uXCJl5SerlPB4,3630 +babel/locale-data/es_IC.dat,sha256=CVkbqGdI9IV5YLwQm98JmeV_j9aNVBVXfybiAfUllTM,607 +babel/locale-data/es_MX.dat,sha256=RtDHQqHZ0k4x6swSZzqEBAMz1H0dDvg1dbp6x3c84bc,30227 +babel/locale-data/es_NI.dat,sha256=PoUvRbw15FFGGMSXH1UcGAMFDE56pUhCwqMy-OQJONI,1774 +babel/locale-data/es_PA.dat,sha256=_FLAiUSNiqSMOo_4Rie9jUQbuRyPvWayhasf0h2offg,4038 +babel/locale-data/es_PE.dat,sha256=J7BX7ZMyKLKaoZqdnaJ6ZhEE8IobYAgdbr3cdKdmZJo,4997 +babel/locale-data/es_PH.dat,sha256=GjEpZddkkAk4djW5BGTdAbNB3uNLxyF9gRI0Bq9TJ3I,1223 +babel/locale-data/es_PR.dat,sha256=r1ZKqoFKiRSgmnMsQqmrUg6YJE6mIQ7X9XgpE0G40Ys,3899 +babel/locale-data/es_PY.dat,sha256=JbyP6zYSw27ITxC1cnjOG9GdF9uyvYkvTHN7Tu0a7-4,5696 +babel/locale-data/es_SV.dat,sha256=tjtwln9QSey5X6pYLlGnjTAhqHr-qWB0QpG4w2njsAE,1482 +babel/locale-data/es_US.dat,sha256=VrgZvQX1GgetFpYpHvckiQYogVWRwjc2M8D-7TxZv2Y,24121 +babel/locale-data/es_UY.dat,sha256=tim90leiltAPHB6PL4tKlPxtRdOQeyGdZkmhcX3Mt_k,2620 +babel/locale-data/es_VE.dat,sha256=zoBdAd14aue2QuyRO3mezQrhE9cxwcBf9NKlk7ztd2A,3869 +babel/locale-data/et.dat,sha256=24aCVLFHNppuQ0X_vnBr4OVFcmWZe5dupyyxcfTEe-I,196651 +babel/locale-data/et_EE.dat,sha256=UwAPPde8mci_crGM2PS2J8THqUklf4iLC0LNkJfhIm0,644 +babel/locale-data/eu.dat,sha256=WVuvurT0swPV-ulsfGd2jOpdLnXcbeDs2Ffwf8Q-ulU,173710 +babel/locale-data/eu_ES.dat,sha256=e2mycbz_gicbFO2NxOroBar1paVtVks6rbKU0DQk6hQ,644 +babel/locale-data/ewo.dat,sha256=CVDmRkKDgI98Ivojg5oKXKmA7nVSweqZwBOXA8GhMe8,17648 +babel/locale-data/ewo_CM.dat,sha256=yI-aZ1OfnkLk61VJZjwNOTD7D2vMJtMSLKsd00E59uw,627 +babel/locale-data/fa.dat,sha256=FKYp2fb-CSGnNTlCC3A7rMbyzhsLtRVuNKD5P26dhFo,209713 +babel/locale-data/fa_AF.dat,sha256=NaSQ96MgGR5EB_RrJuP7gFiVrb6loUYEMYcypy7zyoI,10921 +babel/locale-data/fa_IR.dat,sha256=_6JotyeGMhpci4hEoWxCqlzsc95z74XWfNvGlGLtWLU,669 +babel/locale-data/ff.dat,sha256=xPo7THascqSO1Rwq2MGxSCutINq4u6E0KdHNA1auG-o,16137 +babel/locale-data/ff_Latn.dat,sha256=qOic-JuCdeeklvJgvz_ic-aw9d3EsppDxyFQm7wj0mM,857 +babel/locale-data/ff_Latn_BF.dat,sha256=TBLGrbWzDpgdEEYFKQkpZQkv2I0K51gAkxkurqqohVg,607 +babel/locale-data/ff_Latn_CM.dat,sha256=ZQcoR9AgeNUiLmlOJpSHw3Yd5sbahiRIaB2lsQkOsWw,626 +babel/locale-data/ff_Latn_GH.dat,sha256=7DSLmQiwlps_cxa7MaALYb7DX_AVYlGnu15UqDtApyw,1206 +babel/locale-data/ff_Latn_GM.dat,sha256=xbEOy_TAIY-LSlRkH64Vsg9xfbKVrCkmwamam8Z3xds,1202 +babel/locale-data/ff_Latn_GN.dat,sha256=ULYUzwcPqC_gimeYGNJH_j-bIOlzZ20uoedKbXB6jxc,627 +babel/locale-data/ff_Latn_GW.dat,sha256=lTepLAkz6EAUlbB21jVvqGFUKf5JtLzxaOViEFlwj9o,607 +babel/locale-data/ff_Latn_LR.dat,sha256=Lw-PTOI2dHUkdIDKR0M2QaJCGyI0MUHJK79r_khRokA,1202 +babel/locale-data/ff_Latn_MR.dat,sha256=taaeZq7jyy_js3p3c0Yu5UNx4qqUTZ6zMRNowaqodLY,1203 +babel/locale-data/ff_Latn_NE.dat,sha256=iRB6Y3Yvt1tzteou4XSPNImRDVwE9N89BZ21Y8n9A1U,607 
+babel/locale-data/ff_Latn_NG.dat,sha256=iYPaPREiLDN1GaFfnhAPlFgOIMeGiUBcESuCiffDGnI,628 +babel/locale-data/ff_Latn_SL.dat,sha256=_u93myYrYvpfjoKunJyec1UR0N_sLUQwHftaG9RPOns,1203 +babel/locale-data/ff_Latn_SN.dat,sha256=-2Hxjt9xNLa_654UcxzMMv1KwUzUhE1wKPyg6jJ-cJU,607 +babel/locale-data/fi.dat,sha256=czptLtqLLwRaT87E8j4viQYquDALQk_dO4Z38WvE0Yk,220945 +babel/locale-data/fi_FI.dat,sha256=MGv__27w2XhG65cGxiclFzXkB_D_mtD80vGGsy6nGHQ,644 +babel/locale-data/fil.dat,sha256=BOUOPdsMPhiofGhRVZanQzT3o3b8bK3V-bPqJ3tMKAo,172841 +babel/locale-data/fil_PH.dat,sha256=MTbJNbhtGJjMyeNVaDc06RxfS4iJax53eUzWVERHSzk,627 +babel/locale-data/fo.dat,sha256=kCxt3Qo3lYS5H-DL_DHwQOsIYEqOkgs_GtQQ44Cozv0,157255 +babel/locale-data/fo_DK.dat,sha256=lmV9_xmi7w1HrXr5Yw7buNhw8hkF5DuWjQByYKIFm64,665 +babel/locale-data/fo_FO.dat,sha256=1YHlmzgNj00QkxBql37zGh2Xk59lIuEbf4AKb7eTPSk,644 +babel/locale-data/fr.dat,sha256=OGxp5sUu9_y5IP-Ku2J-5FDGvkpVbTqOGgQ36ZXt7PY,219084 +babel/locale-data/fr_BE.dat,sha256=XvJOf1Tlc0LpQ3-hhQMfSC_XiiGOSR4P-bHQPtcboBM,1272 +babel/locale-data/fr_BF.dat,sha256=Yg6rGyEXEZDbXdN1s2EmVqgVKAzdkaSRsWYbbF9QIW4,607 +babel/locale-data/fr_BI.dat,sha256=QTAzACKRrDjWBIvPnuh-S7pKSgQ0-EaPibEA9bnw-2Y,628 +babel/locale-data/fr_BJ.dat,sha256=84mKliQXBjf3pNe-WKeZERB1Bcdtxy-SZL8uMHZal_c,607 +babel/locale-data/fr_BL.dat,sha256=Ka59fV0FRq0JzEeNzq-St-Mf848YNmOCCKEpLp4EX5g,607 +babel/locale-data/fr_CA.dat,sha256=J6ijg4KmEIHf2UergLrNzEP8TdL7s-7iYK188NvccSg,66182 +babel/locale-data/fr_CD.dat,sha256=iqZnRL-wo8a8aVUWQElzQt2-Gue91GXrjaxZZ5JgmJ8,1124 +babel/locale-data/fr_CF.dat,sha256=RwuuRJZlNtIhH0rxZ0Q_mfYx_Z5Q3HA_D52-qMSNaUY,607 +babel/locale-data/fr_CG.dat,sha256=GLdmlB2XcqX8tkO6l77gX8QUiA1-5eoQG9392mOXupI,607 +babel/locale-data/fr_CH.dat,sha256=i1hobiCDrCYMXMOmTILEGR33AgD_S4Idqo_xXn7g0SY,3101 +babel/locale-data/fr_CI.dat,sha256=AMmxI3EDyAk6_eB5vGbm14Z2bUL2RkkkgjOLqqYwrZA,607 +babel/locale-data/fr_CM.dat,sha256=_EXjlq9zVSLhwAr9dlGsxBYYaLBuSYIJ_-i3yQy62ns,2101 +babel/locale-data/fr_DJ.dat,sha256=eahykN-Lv0UwRDXuwv7-28RSO9l8kC9mOOHHXG99Ad0,1223 +babel/locale-data/fr_DZ.dat,sha256=JKNF1ht8PGAL4-e60lungLzK4K20v4DGZbNQt15o3fs,1265 +babel/locale-data/fr_FR.dat,sha256=a3YB7XtoKNc7Xdt-U_6EuFyVBEgFgHeZ9CvBc3Yzjx4,644 +babel/locale-data/fr_GA.dat,sha256=Ps4qHm0s51rRNwpSw2YbSexqC7sDYMfyu0zL0iJc6b8,607 +babel/locale-data/fr_GF.dat,sha256=F8q_LQGRmooV6LxNIHDfnG3oDXXGnYYopP-ujwDSizw,710 +babel/locale-data/fr_GN.dat,sha256=pq1ao_Medwq07tp6P5AtqZPlrBzqtIA2R6ywLllJyQY,627 +babel/locale-data/fr_GP.dat,sha256=weJ00L82hKf6xOGLC61Tij1VJ5H3bvN0dnSUEUV27Pg,644 +babel/locale-data/fr_GQ.dat,sha256=KwtyyIOmimlf-5-PEfzlJsztaM0CXx13IDZ68jdBYmo,607 +babel/locale-data/fr_HT.dat,sha256=JcnL7Nkbmlcm2GsiQIxnEcrB5VoeHuw0i9Io3ujI640,1891 +babel/locale-data/fr_KM.dat,sha256=wAFOqIYkxtC8fubh9W9hKEOw89O2qmIWGH-z8m4zC3Y,627 +babel/locale-data/fr_LU.dat,sha256=X48QX-y6C5HuwtOvBCYjYbG153us51yvLE66MTYk0g8,705 +babel/locale-data/fr_MA.dat,sha256=HAKjUmYRtiYg5bPuxpmWOiETyHSM2e3F0MJKyF7clRo,1295 +babel/locale-data/fr_MC.dat,sha256=xg-cqOKfvudfHpU0YEWJwKBYdPUUCyf04623NkzlPIA,644 +babel/locale-data/fr_MF.dat,sha256=y7K9sxuz2cfiM5mxj5HP3vUrFwUvxdNqhNXBmf7IrhQ,607 +babel/locale-data/fr_MG.dat,sha256=Ie_c98hiYNsbSsj3XgSZBv8vBsDJvGhPXD7cBBtzSSY,627 +babel/locale-data/fr_ML.dat,sha256=xtckB_DZJRbd9j-aekggaDo2wtG4K4Q2MeG3GBupvPM,1144 +babel/locale-data/fr_MQ.dat,sha256=taQ9FDzbIrvD6LgkraEmPVD_Un0unsJXkgkU1rH_718,644 +babel/locale-data/fr_MR.dat,sha256=PbXQYUYZlFU8URmI8OeZ6O7SAZZOx_TCL2bxs_ilAxI,1203 
+babel/locale-data/fr_MU.dat,sha256=8fELzIJhgczk9JucdI1IMikCSnL5k8LCk1dS68q6opg,627 +babel/locale-data/fr_NC.dat,sha256=uMR2yxfoIpxTTmeGka9y7p0hnL3ch6F24SBvbDLrl8s,607 +babel/locale-data/fr_NE.dat,sha256=Qx4bREydf4aupR9SU7ZZt72aoVBHzDSdQs2bWnDTa_Y,607 +babel/locale-data/fr_PF.dat,sha256=nvJ7u7lByAKHmmz4HghF9_RaL5f3r2HF4YRyOMl8AKM,607 +babel/locale-data/fr_PM.dat,sha256=uorRfZ9XXMmL3UebWphyYLgmkIQaKgHBQfR2YDbKwWU,607 +babel/locale-data/fr_RE.dat,sha256=biR9VqHmRXkXuNVbp62tu5m0e47UOupW8xLHCseLfzs,1160 +babel/locale-data/fr_RW.dat,sha256=ya3hnx8I_04vbYN8UolfgHuDGYsxIMyIcC99i6hxgYI,627 +babel/locale-data/fr_SC.dat,sha256=_l7VQ0RTuCTNdRpKIb4_59iTeU5ACXYRTQ6MTrg2CHM,627 +babel/locale-data/fr_SN.dat,sha256=vUOjpMoha0OtOvwn70YnaafSUc_KwL2iwgQzeYWjH6o,1295 +babel/locale-data/fr_SY.dat,sha256=ue9tpydvd35pkJSa3spqtBLRhVNesB8cUHGsaV6_xhk,1265 +babel/locale-data/fr_TD.dat,sha256=txJhV9BQyT0mzhyO--I5U4Tg7aFJybP1mIFWGCnY9i8,1183 +babel/locale-data/fr_TG.dat,sha256=6wDKXhxKopznkvtYoOrdWSnoyG6uFIZBMfsVBaicOPI,607 +babel/locale-data/fr_TN.dat,sha256=Ezx2vA1loQOWkUJ7J4AJp9kkNOG1LZtht3aFo5RpelQ,1203 +babel/locale-data/fr_VU.dat,sha256=ZgQ6sA467JMwLeFluaFNu0K371_nB6U60xoVu8f5-Ag,1203 +babel/locale-data/fr_WF.dat,sha256=3cX5YsVQSnIWvtZUfWDvmS6j1ROn5n2y7NQhaMVd2BA,607 +babel/locale-data/fr_YT.dat,sha256=PlyHp0evijlgZNqwlqzGA3Vd5Sg-8YqAshn0qa2DOjk,607 +babel/locale-data/fur.dat,sha256=l7MnjsGyf6paBvhc2ptmAzs76ozv3dFl9ZOgKxsF5uE,35099 +babel/locale-data/fur_IT.dat,sha256=ejwvN9cxtQZwBkyXQAZpNYUzYFysgFxVgvXVKclQJRc,645 +babel/locale-data/fy.dat,sha256=G_LaZPkRr04wFmJ9kGuI_AeG52QymjLrPEVwbooJfWQ,110136 +babel/locale-data/fy_NL.dat,sha256=nhxKtw7-G25m_7b7tSRqOJnimegsXUPL4HUh-5A1ZaE,644 +babel/locale-data/ga.dat,sha256=pOfrnacrPyVVIj2mIVQ3Zx3xuLpsfiyQ5MkpQrcySQE,315363 +babel/locale-data/ga_IE.dat,sha256=EnRVPIh4VUjDfkiB9s02m2TP3vDnBaHlmKPgfW95AbI,644 +babel/locale-data/gd.dat,sha256=F7tTnoJpF_v73ELahGQKGS8JxkYOErOa_rRsoeFp7wY,286062 +babel/locale-data/gd_GB.dat,sha256=U0i4ctdbsfL5CoppA4cD5B98vSfsfz0liAAGQ0A81bY,644 +babel/locale-data/gl.dat,sha256=wD6yvNvveYtPHMlxgnPDol4n2w2sJxeE3nBJTLZZrT8,171486 +babel/locale-data/gl_ES.dat,sha256=QnkGnjNmjUPyyrlCJH_ERU8d2R7hoxwaOjgAnDsrr4k,644 +babel/locale-data/gsw.dat,sha256=qggIqARI0NVgVe3t1I6BqiufNHifSpVwZZKuEvr4Jh0,108107 +babel/locale-data/gsw_CH.dat,sha256=MYXozdZ0H_TO_tLGex3-9jMtxwA_ggC67YOtIL7HVP4,645 +babel/locale-data/gsw_FR.dat,sha256=Tqt7-0qu6jwWgmUJqpE-IlXVdxJWwSzl0CL5fFjvsrU,645 +babel/locale-data/gsw_LI.dat,sha256=z97gmIurjhls6p3KuVQaEjtVhKMN3Gb1N6DZFp4Z2X8,645 +babel/locale-data/gu.dat,sha256=EPgHFPlwI-8galuxjZfM_SKN30m5FxFNuHs94u0DN-8,240477 +babel/locale-data/gu_IN.dat,sha256=IKkO1dxbal-2z4ryCnkkfqnIq9ODz7mkcIA1qTMHqz0,649 +babel/locale-data/guz.dat,sha256=p_lLOLBUf11NUyX3s_ko99ZWvzznAfoLt8eZzfHzNVA,16068 +babel/locale-data/guz_KE.dat,sha256=mZPi0_BX7npx8EdqhpWJind4kbNhw7OYAAqu7lAo-H0,627 +babel/locale-data/gv.dat,sha256=0Dl1uBLCFLB7Az64CxnnqE4q8LIfSS0aPeAce-Vkxg8,4167 +babel/locale-data/gv_IM.dat,sha256=PABxweL3RYxPq4l55rn6LROd-5DVwKZ0pysu1Fuc6nI,625 +babel/locale-data/ha.dat,sha256=Qoz8fQprv6EosR4QwGBn7fWFrhclPtxFWuht4SWYpes,48450 +babel/locale-data/ha_GH.dat,sha256=w8NWdFj6TIKsC8eo2kWorNwgLj4sZYYi56ufkWfzySw,1206 +babel/locale-data/ha_NE.dat,sha256=J2YetP7_CacIyuOkdRf6iiqsJ5u7RdLH5n_4UHFUyUc,48142 +babel/locale-data/ha_NG.dat,sha256=jlzeaMkYJFe_hIpCbfJIZn98hJWUh7EAroefrGANiqg,607 +babel/locale-data/haw.dat,sha256=tXRuf2DBns0_OvMJt1wNoQkknnKXG97hHLqo1ixldzw,16127 +babel/locale-data/haw_US.dat,sha256=oOE5jDDNwZpoRIAsW9IH9z8O4dJvDkJGBcrhxqclTag,645 
+babel/locale-data/he.dat,sha256=oz7v2LgGDOqaDQM7PX6N_YVZwKS-o1wu1GcrhKJ7wJU,261916 +babel/locale-data/he_IL.dat,sha256=t5wXL5OtYL02_veNJ6q8Jjl6tO_rvngx-k-ET_8Rke4,669 +babel/locale-data/hi.dat,sha256=qN2bvRPesEdxGLGXCXGGs8kXyqZFaFOs-OtOcwqtfK0,236155 +babel/locale-data/hi_IN.dat,sha256=_E26Mtk63x0S_sHBIDa0OH2l-671oKocLyKNbyOxhts,649 +babel/locale-data/hr.dat,sha256=XidD6n4-tSsjxwUu0Du6XuQmxSuY4EoH6FwOkIHzuoA,219476 +babel/locale-data/hr_BA.dat,sha256=vXmTmPdhw_3UK7Uq-DtS69o4hV1p9qh_ATXztcz-edI,1179 +babel/locale-data/hr_HR.dat,sha256=mOAx3MaOIHtwTdxBS2LqjvttQODxHsqDlUHHFolsE2Q,626 +babel/locale-data/hsb.dat,sha256=0WoqoPYp46ku2fxzcewLkyXAA3sjnCwkwwd59DKAAMU,179147 +babel/locale-data/hsb_DE.dat,sha256=AD3yaZypLO6rR-ljF8F3NnVJxCYYCxL9WTMUpagn6Qg,645 +babel/locale-data/hu.dat,sha256=bokUuxUQyJkPGvtaEesVa3BVOS0WZ6ZLREN8lb0YtNs,190503 +babel/locale-data/hu_HU.dat,sha256=HzJJfSglsxs-2KNMktuKpZ9yOMa6bfQ7eudxGh0orHk,644 +babel/locale-data/hy.dat,sha256=USOsG-90IoRXB-uvsQcCvwFskJS9MIuRcQ6ZYcO2tB4,207877 +babel/locale-data/hy_AM.dat,sha256=E4cN87TIR1DzbY8ih_H4bv-s6sC0EuAO2F1l3I7xKt4,626 +babel/locale-data/ia.dat,sha256=AbRm3U78keo7BHpisZrZhxe9zH3Ss846DrslrRo52z0,112799 +babel/locale-data/ia_001.dat,sha256=hlAa0qtQHkLi2GpqHmsEyROZth9cXgmAH1-4X3dGlR8,932 +babel/locale-data/id.dat,sha256=7FGnheeRztRFIuqveQHXi--GtIBJX5eGHZ3DiFEc9VQ,159525 +babel/locale-data/id_ID.dat,sha256=89br-9nXjDl_YMYlmEZsXntLYg3zVvhnWhPW7I8TsK4,626 +babel/locale-data/ig.dat,sha256=FJX1bguZTb9nzeGLvV0-rCCPMSJD4zNBB4vRovFhnDk,19576 +babel/locale-data/ig_NG.dat,sha256=zLSkkEBmGK6ao-gQHmpoLw0QGnpk9AIfpHuwReF436U,607 +babel/locale-data/ii.dat,sha256=jqN5Jw5vYev2U5prUjk7W8b2ZDcEqfRWFY3F6TRX03A,12609 +babel/locale-data/ii_CN.dat,sha256=kSOyQhGFX7GGD9QbOTiQ03AIwHKTlPiAA79bhpnd3RA,626 +babel/locale-data/is.dat,sha256=2wQuZCiLU8LVDm_etN6Q0Sb2uUgj1906stigLji9lvk,184858 +babel/locale-data/is_IS.dat,sha256=ggPurORP9YiNltg50UxSvBn-IxSLQkh1Z7vfuAIat2Q,644 +babel/locale-data/it.dat,sha256=kZer8Sb4ged3pz5ZqH0NxxTblZGQih9UTQMKdzjHjt4,186750 +babel/locale-data/it_CH.dat,sha256=x6SYxtwSWqZ7uK-WH7KnW9JQXFt7vjDVWLeRdxNMOX0,2794 +babel/locale-data/it_IT.dat,sha256=69HQXaQ0Hwq5FyY_HFQer2_TaNRK1cBkfwcQoOgD3Q4,644 +babel/locale-data/it_SM.dat,sha256=-_kXYm4nypjxW8slleVL6npDk4swP0VuVVyDSZzusQA,644 +babel/locale-data/it_VA.dat,sha256=bRLH_PNEqsBmiEeJ4M2QrT7m-lxfJ5a0mtzpOm4wUnY,644 +babel/locale-data/ja.dat,sha256=44T77KPAAaPXTo2vWGPev9w8uSgBjuxH0NdE3Rw6zV0,195480 +babel/locale-data/ja_JP.dat,sha256=_CcStROLo6tiJ1AUoPpL9-zhxd3dWAcgH4UxcKW5p5k,626 +babel/locale-data/jgo.dat,sha256=JHgE9Kgm8gcKZXkRFnBOZL8SWmVefD76VPN1EM660yQ,12649 +babel/locale-data/jgo_CM.dat,sha256=EA-EL_vITEcScepi8XtIZp9SfEo8yNtP8mjMDRmurrk,627 +babel/locale-data/jmc.dat,sha256=ACdBwmyvLhD-wfTDx5C9BkKiLgAwYi7MXIZjWdnqDs4,16120 +babel/locale-data/jmc_TZ.dat,sha256=YEigUlJSIsg-2I3KAGmE_7yjVxXeavgHrzATPawzjZ0,608 +babel/locale-data/jv.dat,sha256=bugx3sMto8K-q6UzIdI9_ojxBU4freLUOFll-ZETNrk,127658 +babel/locale-data/jv_ID.dat,sha256=jqAFeLVxdIWZRCImM11zmQxFVN-dTNSHqkJjMd0fL9Q,626 +babel/locale-data/ka.dat,sha256=PMXkpUZaQ_RakmWrSOapK6BVvijldgDvMO5S5sBzZBM,256694 +babel/locale-data/ka_GE.dat,sha256=Nexqnu44i7cXrHB0jlI_229QKiNr_k1TSvCRgb_Zs14,626 +babel/locale-data/kab.dat,sha256=Y9PhkPEATNfpisfItTMvOrgnz4QOSJKONa2Xiz4u59k,134995 +babel/locale-data/kab_DZ.dat,sha256=7hA6qIdD8G5tnw61QlzIlnS5U__261v3XMZT2Wr93X4,670 +babel/locale-data/kam.dat,sha256=bcQ2hEj3o40MkIoAuecyMBXwN2xCRvPoalAKBmbVDYM,16227 +babel/locale-data/kam_KE.dat,sha256=1FV6VA2MmH4ZgMbVnb24DpSO3XAYfQzmKxz3z6CBAjg,627 
+babel/locale-data/kde.dat,sha256=f-939iTq5qzLyicPxbSOfbEd5zUzqNxVOJsIP-2sGoE,16527 +babel/locale-data/kde_TZ.dat,sha256=BNkDCTvTEt2R4XEQbKwuNKe_n132hEdXd_YaYunBcL8,608 +babel/locale-data/kea.dat,sha256=XBJTSBu8-1l_psgKfETjGEq29Q-l2mAd-XqgpKt01FE,71368 +babel/locale-data/kea_CV.dat,sha256=vucrSfeOhAM0D2XqJADgq1aQj73Y83CxF-5dBMPjT0g,608 +babel/locale-data/khq.dat,sha256=xIIW5VdIykrnI2yYkhlJzskHmJtESjaDC3GMB9PlI8g,15992 +babel/locale-data/khq_ML.dat,sha256=KdvooU50jIQ2C4KfGTuRStG_55DFqJxRh5UtULTAQW4,608 +babel/locale-data/ki.dat,sha256=zVuht0vDiuwqzW0ifSGnmQ0UvWm3RyyWCA9zzEFrY5M,16175 +babel/locale-data/ki_KE.dat,sha256=JWVi4WSFtN_grtZ1IzyNQlSb1tzHGg0UxFV9MdcONUg,626 +babel/locale-data/kk.dat,sha256=PAr_CVdN5k0hKuGNK9hz6vVVdPqcfLe03yGEL6djPr0,204884 +babel/locale-data/kk_KZ.dat,sha256=XSbqJqUhzr0pVwmcdJJPydCzCawHbVeP0u4DNA_jhXE,626 +babel/locale-data/kkj.dat,sha256=fNklRbLpQSyan1SPutZRhWBypx8TWYjc1j3UIvwYTwk,4907 +babel/locale-data/kkj_CM.dat,sha256=zBage_7dyPY1TQoWCrfV2TPdlDLYTSAFmoj25CyvALs,627 +babel/locale-data/kl.dat,sha256=ARiEanVrxmHJWpZoDHL3-5bDLLpnc9nS76f7Kp0EqBY,58262 +babel/locale-data/kl_GL.dat,sha256=U8dA1dcOZa88aDh_Za4Q_cuO-QkjQON-623nTSwvzJo,607 +babel/locale-data/kln.dat,sha256=0KelxA56bvhWs7VzcNIzxcJg9wg6P2bfOkZf1ksltd8,18062 +babel/locale-data/kln_KE.dat,sha256=pA_rLiD4YhTe_VqEzm1jUNqr-p3oC_RW2Q2JCptdo5A,627 +babel/locale-data/km.dat,sha256=I0D0wPvYR6vlyAqaPiNmS3du1xnjKYALBalmsisoGZU,199952 +babel/locale-data/km_KH.dat,sha256=pJZh2Z4Gna9qJRlOE-yQUcTXYMJLxXW2GiA41f911PQ,626 +babel/locale-data/kn.dat,sha256=rI2BegUtzlVb2hNxE99hBpeFtNzVHoOdhVYQsh_hllU,254243 +babel/locale-data/kn_IN.dat,sha256=O1SMSp5lyz45vrJD6qZOGdUJvk8sA_pEMpfZ7GT1Hcg,649 +babel/locale-data/ko.dat,sha256=3ewk1I7Tm2zL68NIomcXz-j08yxgg_sILYHgKkn46n8,168189 +babel/locale-data/ko_KP.dat,sha256=Irg8tFOdTqkLtdsJK-whgWIl6CYpzQP5RQSBXFt5lzA,807 +babel/locale-data/ko_KR.dat,sha256=iWEE9J7ywo-4HLyDz5nRWPCIBwxMXN-g8ZVluEgP254,626 +babel/locale-data/kok.dat,sha256=rzStfhkGwa7NIRzhCOntT-evQiacjkBTH3I7F24Pc8A,82765 +babel/locale-data/kok_IN.dat,sha256=RxPYjH26AlDPH57oj9jYulR-b4PZST3ejoheHedo2Bw,650 +babel/locale-data/ks.dat,sha256=P0rhXLRGAZk9ltGxyomMfGhleHVGmt0Ah_mWMYTpkbA,102488 +babel/locale-data/ks_IN.dat,sha256=FbxiVs_-h7d9TExduoM5qzHoWM6j60ymcgK1_2sjGKg,649 +babel/locale-data/ksb.dat,sha256=R8_UFjgnrqwE8sBhSdvxmmE3pjBkNJKSbjW0xvc4eQM,16095 +babel/locale-data/ksb_TZ.dat,sha256=P6Ek580ObjaXqb-Niin4LoZm-eBCTCZ_NDULLzMeC8Q,608 +babel/locale-data/ksf.dat,sha256=XAtIX8SXZvHlRlMgRfFKFDaQlnVN1AuK2LjxTWQY260,16569 +babel/locale-data/ksf_CM.dat,sha256=_uUJd2qMvazQrVHTPOtyXTDlMvbuoISj1F1s4EH0DOc,627 +babel/locale-data/ksh.dat,sha256=7CaMQbLiUj_7eMvTq8m0mFmYr7kHfthLgCZerVssnNo,88940 +babel/locale-data/ksh_DE.dat,sha256=VH8heoZOq8IaxYfsA9eB4CBxb_M1bSvV4pdJ8Z2E-EQ,645 +babel/locale-data/ku.dat,sha256=Dw_Ht0WncxRqQySXLb0an4q7FaRG4oomDNNzGp3XxWY,25250 +babel/locale-data/ku_TR.dat,sha256=M_qtiMFC7haUg8UTe3AZ1i-uMY8hzxjtpUgLM2kEYA0,626 +babel/locale-data/kw.dat,sha256=xYQtQdmpCyUVtWGhBb714h-X_IK6ImM_0yPXyYerZJ0,7263 +babel/locale-data/kw_GB.dat,sha256=-Xb3hs90VWg54TdwSerXDq3lxuOc-Y_wMyfdt9i-agA,644 +babel/locale-data/ky.dat,sha256=xnyKiVz_OQ00SVC0ab6tF2lsjZOoLDXI9vKYaGKtcKc,198490 +babel/locale-data/ky_KG.dat,sha256=KjjmZqSgzsCGALW0y1fiURGMVa7qmafuit7QKHfJEPk,626 +babel/locale-data/lag.dat,sha256=OZ2DjZQ-J3Gn8NWaF8byI4KmSO0gxo6ynBR63oK89rI,17194 +babel/locale-data/lag_TZ.dat,sha256=SCwIMT10n8D1ekVMqg6nAcRXYq6MJXNeWOsvVX8IkOw,608 +babel/locale-data/lb.dat,sha256=FhQHzrbTMvwFSRCjlRF6Y6FRqDKKiXqhtJiP2x-sJMA,164416 
+babel/locale-data/lb_LU.dat,sha256=olHjVPCdBZkqm868qqx4i65Wwx6AOPe3lINW6aJAbeA,644 +babel/locale-data/lg.dat,sha256=Tk3WfKfVjXIw1gCFo5hlyNuxhuqYdGe5a2jkjPDVYyU,16487 +babel/locale-data/lg_UG.dat,sha256=qCr6389TVehgSTUVXvzatoLF1e022IThG7RjKuJpACE,607 +babel/locale-data/lkt.dat,sha256=Av8k434perqFI_6BRP6EPkvchiPN_nO2CklTNVE80FQ,12787 +babel/locale-data/lkt_US.dat,sha256=Ri8yk6RWC_94pO6kLYqlpBv03WiLu1rRWfmaNDI6IOY,645 +babel/locale-data/ln.dat,sha256=yQJCBmMpVevkIOBOUGvrMkp_QhqUwdk7inbzdAEAdBY,25944 +babel/locale-data/ln_AO.dat,sha256=gEc6KBMCLUvrJkXrbO9QBF21m4dzmbA30ThJTD0IhLo,627 +babel/locale-data/ln_CD.dat,sha256=wd8KeWpyBgRxFdZOzPRq6rPq8E2BhpWr9-6sZpT4VuM,607 +babel/locale-data/ln_CF.dat,sha256=Uz79NMUs8cX3JzPyKMf4u9_byQhjbl1wSGZsb0atVI4,607 +babel/locale-data/ln_CG.dat,sha256=9Fq944Hcd7zuj_TDXg8RiHhLVlhxrchJSSD1TwsImbE,607 +babel/locale-data/lo.dat,sha256=gDnF_vrRlupEleLl6cLJAjSHOQrYZ_LXQgllbgc6hmI,217931 +babel/locale-data/lo_LA.dat,sha256=gWqRr8oGNYTNVtYlY9w5uqi9JSgzDujHxvaJZW1V3T4,626 +babel/locale-data/lrc.dat,sha256=YdYWaAoqdqGpAmmjgdnIWYdVSVT-swZUtdX_3TOSBwU,19108 +babel/locale-data/lrc_IQ.dat,sha256=mjDNrH0bJqU2uNIPr4W13ychqJLfa89zSoHyAflcM50,1246 +babel/locale-data/lrc_IR.dat,sha256=htOM82RqZN8DsREw0zziwiwEf1Q2wRwzN4O1EPSUucs,670 +babel/locale-data/lt.dat,sha256=iVizf8vz2hHKOs3M_WJsRUecMBMtgfJMAvXYWnBYdjc,281388 +babel/locale-data/lt_LT.dat,sha256=xcQ9WNnIdoq-pTrJDlo6J2SKztAPEXxOK8xN09nbqsQ,644 +babel/locale-data/lu.dat,sha256=iaOgjHXhBbxqkaMZfnIjZ9yE_ivOogLi_pIltMav4DU,15940 +babel/locale-data/lu_CD.dat,sha256=kt1Ck2JozYYmkDCjbBbDR4U1PqKsmVHCQQqQViFWoDE,607 +babel/locale-data/luo.dat,sha256=OMmTjOcNBykKCDRQkrlMnvTXd4TTWph1KNFEYeOp5BQ,15937 +babel/locale-data/luo_KE.dat,sha256=mkNhvC4s9Ay-_oOw0UAbozSEtyhwktZb4IEXqHb7R6E,627 +babel/locale-data/luy.dat,sha256=AivHUzk0jfpYWCd9XP-Vgn5SplZZ3Ruk7rVJTXhEqoY,15911 +babel/locale-data/luy_KE.dat,sha256=GQ1Tripph0Kv8EWkDmbUkBoGNRu_Z6_nLraREYDOJIg,627 +babel/locale-data/lv.dat,sha256=aYbZgH9misbuEu8xd2OPukaAcwmqJZkQjMB9I2TRq60,211983 +babel/locale-data/lv_LV.dat,sha256=RTXhOKiDi25vJqvlFjMfx4ipS6IAz9NOsPfvqDA1xHw,626 +babel/locale-data/mas.dat,sha256=GrT-spd-MlSJf_rY-oBo322W_GHL4BGR5-b9WAWhIJw,17349 +babel/locale-data/mas_KE.dat,sha256=dvwGyYsRBTACJmRkHe9OqHdBcE7-_3JETAE5DcZxyOw,627 +babel/locale-data/mas_TZ.dat,sha256=_awVxs_AqCJK_nFGOWUV8CfM0wkBTOyTxp1wf2wx38I,629 +babel/locale-data/mer.dat,sha256=dfZLAHiehC0aQF-BXQRnSXnJlOkC0R5HU8uMpPqBMDk,16140 +babel/locale-data/mer_KE.dat,sha256=VAEMZzP_zylJ_jGnwsvys8iwOWMxpnFD2FM73GEhVFg,627 +babel/locale-data/mfe.dat,sha256=f7Hwcx7ufQhlyaXWkshVNcgbh6RbSWBa_YtQhy2nXDo,15169 +babel/locale-data/mfe_MU.dat,sha256=fXUaGJO-Fk72_qgRVSQXN_4lCMNiOfLbtcrEMoc3Z-E,608 +babel/locale-data/mg.dat,sha256=7bWB6HH3HhS2boHUg3Fk6mCT0hTOz3Ev3X9KfXZbEMA,23602 +babel/locale-data/mg_MG.dat,sha256=Nvn9k4UDSq5ryL5pXMItBWimjPbby559_heRzkodQOQ,607 +babel/locale-data/mgh.dat,sha256=sBxM1pG-ctAdiJ9hl4Hag3hN31bkQBRC2t6R7YIqnF8,10532 +babel/locale-data/mgh_MZ.dat,sha256=bj3fkT73aicgmEW0wNj8Tl1fMg2raeXoMaC3u-CobtM,627 +babel/locale-data/mgo.dat,sha256=1YpgmLHiJWcmqQvNcxpOIfKfOQ1etEAMDxkQi-psLm0,8266 +babel/locale-data/mgo_CM.dat,sha256=PF1Fm-f95o7Lu9qG5FhDJCaHBmXmV1yCuacaLuiJcUY,627 +babel/locale-data/mi.dat,sha256=P7-TssxweXKOKPlQy2VcGNMmiFVVVe7LP3hv6aWtiOk,19166 +babel/locale-data/mi_NZ.dat,sha256=d_K1LO5OIXVG8bHH1WSIVFWt7mqjxrk9T1FUY7Kr3ko,626 +babel/locale-data/mk.dat,sha256=n5yeB-nRq2EjOqcfESsgXiuuspwby7SS1l79DazdC3g,230242 +babel/locale-data/mk_MK.dat,sha256=GT9GwufrdgHTNvJlilW5X73eWe7x8nPNu0orikX-q6s,626 
+babel/locale-data/ml.dat,sha256=TEf5l5IQe9_eEMqM81zevTtjxYnTU4ri4xdIrUIUfas,282791 +babel/locale-data/ml_IN.dat,sha256=v0Q-8P2QVsISoVj0c7_SPRfv20HgUV7UpMkkyDXbr_4,649 +babel/locale-data/mn.dat,sha256=bRv5302YnTCpr3_mHntO_5niulNYjgAIpzQbE1gyA4U,196664 +babel/locale-data/mn_MN.dat,sha256=BU1qpiwX_J5qR1sdIZzJWIWhA2vVo4R85xQCMO53zeA,626 +babel/locale-data/mr.dat,sha256=pTtl6V-f6FrjTSAdZRDI6uxb83NjwdyrOswfplw6GKU,242765 +babel/locale-data/mr_IN.dat,sha256=aef7VFbvFenw6pJg7ZDW9vV52b9PENWqbIZ3ttetPiU,649 +babel/locale-data/ms.dat,sha256=qTclc9o5XP5CTlDz3RDep-QXIKYCjSKcseELT9H53XQ,141460 +babel/locale-data/ms_BN.dat,sha256=nmT_ZUqySFofHKPBOeB79OXLF0_rN9_1l7Ene73Tovs,1275 +babel/locale-data/ms_MY.dat,sha256=V8RAiIYkRQeK0zZmGrMX_kYD81Y2jdyK8UkwWficNrw,626 +babel/locale-data/ms_SG.dat,sha256=fN3UCsrV9k3HFZuIlgDV1Zx84C9EVpyOcGmy8HTM9VY,645 +babel/locale-data/mt.dat,sha256=NvIc3Em2gHEQOmJNNyG4BlPFKayvF1oWOiXTyJKMfUI,99982 +babel/locale-data/mt_MT.dat,sha256=XbWtjBItwBOzQ9dfXffkG7ip2cQtJUr_uOLXc3nP-Y8,626 +babel/locale-data/mua.dat,sha256=5u-bhk5Yqn4Ol-rPRUa440aVFzOb-ehzlYykaVALBQ4,16624 +babel/locale-data/mua_CM.dat,sha256=jU0ODX7XWhTxSHF4Zr1r6qX4F6GMTIhfojNXPlQeVzU,627 +babel/locale-data/my.dat,sha256=0UdwyYsfpnde3_TNEl0ayDs42Zgg-QrRyuLehT1EJ9I,207004 +babel/locale-data/my_MM.dat,sha256=Bzi2JR91HdxPj-Z0eaOLHk8UK7tTh3YVb65GdioGkgY,626 +babel/locale-data/mzn.dat,sha256=vwdqMcpOLkqkInioKWteaAVTJj-zYLNNI27VcZA6S30,65508 +babel/locale-data/mzn_IR.dat,sha256=Sx-axN08t4qUK6gkdZJI9gE_od94O9yBOl7R-xydR74,670 +babel/locale-data/naq.dat,sha256=vBYqqBMEGqzIQx5SW1A_P3rbt7IAQAHBaBFc6GpSYWM,16670 +babel/locale-data/naq_NA.dat,sha256=SVEYeRqX3deeAluOQ37gwHfm5cGLcPwiNtIKsGLaRlo,608 +babel/locale-data/nb.dat,sha256=7TBrzSoRB76QUbA2V1dSgapFQhNxRQ02oxXxdnQeZxk,206686 +babel/locale-data/nb_NO.dat,sha256=La0HFi0QPvkipslkGYJWyrvR6qyZSrVf3rxyFy5MAeM,644 +babel/locale-data/nb_SJ.dat,sha256=kfp9zsPvpbuegd2KdJl4WO2eppXw-ybSBDrj_5-Sepw,625 +babel/locale-data/nd.dat,sha256=-x3R97oZR6KBTEa7fIudTqIzEGLVO4ErMabTlbH5cqE,16364 +babel/locale-data/nd_ZW.dat,sha256=VVVYbvzg8pvKDAUDUfCOBwVjgSqR8kRNZa3OvIMolxE,626 +babel/locale-data/nds.dat,sha256=3Atq1yjM2MyTIsdO0hTSNHuRa-3XrXHsctO9BXE7QA4,50653 +babel/locale-data/nds_DE.dat,sha256=g4IgcJMM4s8ZDbkks7VetJ60Fv_SDd2Z7KYNtt9EwKA,645 +babel/locale-data/nds_NL.dat,sha256=LwfMpT7rOaqwl7fz_0H_rnApfhJpG_WG29HY5fAW4vA,645 +babel/locale-data/ne.dat,sha256=CDG36HQ3kKDZcPo1_aTwmnLqLKHg7J-tx-twaveRRCo,243069 +babel/locale-data/ne_IN.dat,sha256=Ty0qfdsX3laEfDmVosUYb1nO4lbPuSwUiUklpZ4CivU,1283 +babel/locale-data/ne_NP.dat,sha256=_aOsODQJSx-2Qp518aT1E7U512mda8-coqIfgRf-pjs,626 +babel/locale-data/nl.dat,sha256=xb8ziI2bEfydTIXYg5xpiF4EzxyKe2_9-BHzDKpBbfw,195437 +babel/locale-data/nl_AW.dat,sha256=lWaFk0qMUWo_XmeHAOHypINaoLCgBOytw5qh02PjR2g,629 +babel/locale-data/nl_BE.dat,sha256=F20Z12nxs501ipOGOVYEWgoMwOM37I2l__E-nytDl9Y,1853 +babel/locale-data/nl_BQ.dat,sha256=NjO4GFRK0oeyv6-hC5I5TWijntLMaQkvTTeMTxeo2go,626 +babel/locale-data/nl_CW.dat,sha256=a6rSPV1TGf0Iqj4WPtNdCMturkN_acf-wTdGwTjjcmo,629 +babel/locale-data/nl_NL.dat,sha256=Z5xxMZmUPdO3at7G0RRR1jvz0nNRAErbp-u7EhLRJ9Q,644 +babel/locale-data/nl_SR.dat,sha256=6KsySLDzDt8H_rmCVslpGWYLln01xoKdnjkga1MgbY8,687 +babel/locale-data/nl_SX.dat,sha256=ax_nqnWCIEWLBF_OAZApdqoZxHGmrrpc9OXt_2GVKTM,629 +babel/locale-data/nmg.dat,sha256=JHmpf4ywpHJH5X8pM-aIQeKqXICoMuvUgH6LfvYA95U,16241 +babel/locale-data/nmg_CM.dat,sha256=kfA43rC6-mmtCEQ4KahPBUyi1SLoxOm2_k557QI7Z-I,627 +babel/locale-data/nn.dat,sha256=3xzcFn_sSkfJdOutUmKmMXnuNvpUbqEfsmYUCR44V3o,179805 
+babel/locale-data/nn_NO.dat,sha256=D9aXKaMt_90RbYBc98NvfkVZ_fYV2qjg0JgsEHKXpE4,644 +babel/locale-data/nnh.dat,sha256=iJ8maFwvQWWF2z83wPNeDWCjtXJkb-6BdqWTNe8S4vU,6787 +babel/locale-data/nnh_CM.dat,sha256=iCo3O7kW-Lcsr5g1Bv5bfLm8FeKXt68i6aq0Qqa1cM8,627 +babel/locale-data/nus.dat,sha256=cdQBVX2H5tuWRZUTUORxThKZb8ZA79CFis8Kuetgjxo,9183 +babel/locale-data/nus_SS.dat,sha256=EVZv-VLRZIXfmHowg2c8xkeZcrFC6DWKbb9aYzuykl0,608 +babel/locale-data/nyn.dat,sha256=glQUHKEgHma36QtPbPimKIoFNkaJflVPo6r9zpx15eI,16328 +babel/locale-data/nyn_UG.dat,sha256=m8ciJ8wJ7Ss0sZEwo7hLys0p8X9Tc3iaPAIjKEvvzak,608 +babel/locale-data/om.dat,sha256=C_dYC8Os3xZXrR2l_U3HBivYw6aidjIq7ROnCYtY9Ds,16609 +babel/locale-data/om_ET.dat,sha256=Og-EdTbZ_HAokrmbPFbOUuekkmh7ch2DKDZctUISASU,626 +babel/locale-data/om_KE.dat,sha256=2ANK67n7vvRAps6fTVn15wa2yYnz_2yy303bozguDSo,1584 +babel/locale-data/or.dat,sha256=hN4VKCkslHZrQ7ti_sU0z_NiZoiEPkYbmmg6wcKMGkw,237035 +babel/locale-data/or_IN.dat,sha256=Dw9dokygCMjypNY4sndqyvOnTFsaqW7pq_UPLLHqtTY,649 +babel/locale-data/os.dat,sha256=8dpK1HvfjQPT978fBRPiuk1wRtlSp4N4lcyPGbxy3lQ,17648 +babel/locale-data/os_GE.dat,sha256=hSmyrOpT0abvV17Q5Myoar7UMieDCmckpszmuRQJxfQ,626 +babel/locale-data/os_RU.dat,sha256=HRXdDl4XLKTYX63EpDJYq556qySfi_W9q_qzuQL3J0A,686 +babel/locale-data/pa.dat,sha256=kZGBG23ACj27nI-LNf02A3Vqw_KFvk5D2lQHH0G6UXk,237090 +babel/locale-data/pa_Arab.dat,sha256=Xe20lZpLm3stLXFQTLIq5PKUD7TvC4ORuMzRZiONqJA,3967 +babel/locale-data/pa_Arab_PK.dat,sha256=_VUdc706YAQSu8NoUCuvHosko2l8GtwMdvTXQkuh8oY,626 +babel/locale-data/pa_Guru.dat,sha256=RemwK9VZViIqmbpdFqso7S9xD55LITJbW3sewM9oRUQ,1267 +babel/locale-data/pa_Guru_IN.dat,sha256=S0ctHEKJ7RxJFlvLs31Yflmn3-RwlWb_zc_gCyVYzrA,649 +babel/locale-data/pl.dat,sha256=kbqTerSZmugR4oZtKNUgvAbQFeRSK3uk5QfgvD32Xk4,228149 +babel/locale-data/pl_PL.dat,sha256=3aqxuKmM1y3xnRC3iG6DRzNquFCjOhSe-USlt0jKY0I,644 +babel/locale-data/prg.dat,sha256=cvJ-ohjlFKFxS89iBJxru6s2seI9xnB6GLQor53PfjU,20221 +babel/locale-data/prg_001.dat,sha256=1SYLtROetU-QpYhHxtQWxcY5wNmVnqFxk-J1-3BLYoI,1585 +babel/locale-data/ps.dat,sha256=QNalPguMKEGOci07W4rvZ5iHYDPK_eziWOIvKT_Aq7k,142340 +babel/locale-data/ps_AF.dat,sha256=SvjOBEFhyxjFIfXoaTObaS7vNyKTC5-Q1qkOetchsWw,669 +babel/locale-data/ps_PK.dat,sha256=bpR4HtSC8OlQ7_J0bMbZLFV7VwPzthm93offnsFF7Vs,7979 +babel/locale-data/pt.dat,sha256=lrMOGnflLfsboEb198U1WjHI--FEF92bvn5hcr2bKqI,185356 +babel/locale-data/pt_AO.dat,sha256=QxTSJ_njOOr9W8_wFKMefA5_X_vLLmbTyTShIPr2TYk,1013 +babel/locale-data/pt_BR.dat,sha256=QIoCEwQ4MVDEptP3uYSfuxaUg8LM6Udx_j0u5Pmsscs,626 +babel/locale-data/pt_CH.dat,sha256=1vlk7Moo0YazBGXzCnhzraOQ3na7vMkhiacGprATTPo,644 +babel/locale-data/pt_CV.dat,sha256=TuFHk3Ps62TlH8OXoA0I5n20WXMtJwRkTX9F-ifPPSI,1038 +babel/locale-data/pt_GQ.dat,sha256=n0jtozbmyMELo3v12qZh0bLpOXkl5Yo3-SaoNQzB76o,607 +babel/locale-data/pt_GW.dat,sha256=kzmvupn16Ryy-Ad021sANJicQ405NWoLR3W0hYNgEHY,993 +babel/locale-data/pt_LU.dat,sha256=2PCOkLE4Fpb5rr0S2yGHzWr02NXkEU1DFq9cb3jYPEk,663 +babel/locale-data/pt_MO.dat,sha256=ASQA8OF0aPTEMnnUxPgOuTbVb-Viwruz72TsvnJXhCQ,1610 +babel/locale-data/pt_MZ.dat,sha256=OyyIvwlWPVXczu-1QDtFD9x3TCgbuWVmSfabMU50XIU,1033 +babel/locale-data/pt_PT.dat,sha256=JY6J734UdlBda2-mpgdbopkJ29vH6yRCa3Drs8TXlLY,95387 +babel/locale-data/pt_ST.dat,sha256=XFGnYoCcv12C1dIPHpxn_yWF9TO42Ze6HqHaVX6lJXs,1013 +babel/locale-data/pt_TL.dat,sha256=Kjoj1JE5C_EsdKbWricnpKTUirMnSI2bVcljIDUEtXg,993 +babel/locale-data/qu.dat,sha256=UUdFgTeC0zefb0AXhnHREXZ_bRxMx4L3gZERS2byRFA,63678 +babel/locale-data/qu_BO.dat,sha256=VSRgVtWlTouId0GarX_ZO5CZYAY7HulYTZQzsrcKpC4,854 
+babel/locale-data/qu_EC.dat,sha256=_Ftb9lPp9eFysmiuzd0LXl_da6UfcDdAH9iK4_OfceM,828 +babel/locale-data/qu_PE.dat,sha256=u7IelNiSOLZ2gSHzXBLQloraDKQK6rn-Gj3PDrpg7v4,626 +babel/locale-data/rm.dat,sha256=6VF_YfKkH4zszgClWGpCutJLsTtPzjz8YqqIIbN0cIs,67703 +babel/locale-data/rm_CH.dat,sha256=nM16gH9DyFuCWJvzmeUYDeI935TZvG9PeHlnJtwsDJ0,644 +babel/locale-data/rn.dat,sha256=fM0YjSf63MqPnOGaoSAiJ8pGuNLeS89EbA1uvZffPD8,16834 +babel/locale-data/rn_BI.dat,sha256=ysozkQZjd7MbdqQR0Appe1Z0pWEI4Uvt2sOjh1Bm33g,607 +babel/locale-data/ro.dat,sha256=0F4YaitEpsSgSlFL-spgysyo8PD5nWzldC3JRPR5iKY,214600 +babel/locale-data/ro_MD.dat,sha256=BPMaAQ0BsOZkbpcwrDCR1dAYiIpEoq-d2QttEiTxLsU,3468 +babel/locale-data/ro_RO.dat,sha256=YGMdthocAJEwwzNv1M1FpXmx1npnT1o6QEj6AIUJA1g,626 +babel/locale-data/rof.dat,sha256=_9WDdCvXcsispR_Jc_bd29GLdco-0vv8BGBAuM_x45c,16222 +babel/locale-data/rof_TZ.dat,sha256=SSYWLiv1Okua7sYRhIAvr9NDMLo-s0R9fVOnh380Hu8,608 +babel/locale-data/root.dat,sha256=3EUwOJF9fGXgbeqm_oB0m72qKAi4P2KtMhTX0tgfCno,39989 +babel/locale-data/ru.dat,sha256=4CN7wom0dRoNu6rl9v1J0obCQ_hqEhFFGogsD11Wotg,297015 +babel/locale-data/ru_BY.dat,sha256=lucENheWCnm6l2kFWx5iiOQ90RnN-a3SrbbpSMEH23c,667 +babel/locale-data/ru_KG.dat,sha256=q9H-sCDazhmCcmFn-QlJ1eBwVnXaGmqTFZT4W-9ssHs,650 +babel/locale-data/ru_KZ.dat,sha256=zOoq7g7gEqS5ScuiwnNbhFcCYXEBVQ45hoH7x-SKlAs,647 +babel/locale-data/ru_MD.dat,sha256=XQc4LwXm3XLmevt8NRZEVQ5tj03YVkeeHdVb_mzH0wI,645 +babel/locale-data/ru_RU.dat,sha256=UZR5Khv2hLp9BZbGcy8Fxg_luQDS8dgxQqXnmrYOGKs,644 +babel/locale-data/ru_UA.dat,sha256=OVdhP1Te2SUv0a46Hp1FTLUVwBTbNyinQDy1RLLE1ko,1765 +babel/locale-data/rw.dat,sha256=gah0pwvkYNXB7CP42-1B5CMFxjVNjNKBXPBjdPumHuw,16234 +babel/locale-data/rw_RW.dat,sha256=lw9-N-Bp6o2kOraBdnpqbHXS9S06vRGoRnTi13_-IDE,607 +babel/locale-data/rwk.dat,sha256=UK-qdX5VjbSXmZ1sWLpUggwyEbekDq1nSWMqwgYdPYw,16109 +babel/locale-data/rwk_TZ.dat,sha256=qOwICQ4gh7wiNjyNsrZXTCxpEgydLb3KLC-aGSaTdiw,608 +babel/locale-data/sah.dat,sha256=qTZCEQCAO_up0P4P-GCMC_Kzofutc-1oSBqmetTCvqg,47992 +babel/locale-data/sah_RU.dat,sha256=fOJxjNB4yurKvi_Nf5qd_vZ6Ph0P5TreDKysCfkMkAM,645 +babel/locale-data/saq.dat,sha256=_DqapaXzmUP8SkniGLNOgMtUOCixaqY-2P6V6BUbHXg,16507 +babel/locale-data/saq_KE.dat,sha256=lO5cOVp83wj9tFYBb-OT_EoKnvI81obyEODz__dalok,627 +babel/locale-data/sbp.dat,sha256=B8DxXa27HxQ03WmXTxDWsUPpr2Xy66X5utdQ0lTjmQo,16532 +babel/locale-data/sbp_TZ.dat,sha256=Ck2aub_MmSbx5Ixgs3RxF7dJU90nUufYP7cj1eq1eeQ,608 +babel/locale-data/sd.dat,sha256=TyFszAcJ_BrtUiPW3HO8I3ZCQDGJJt5lukJMGWeb1uo,188120 +babel/locale-data/sd_PK.dat,sha256=tYuzmCxNr02-FV1pgkN94YAvn67EdkluBYaUTr9S3A4,626 +babel/locale-data/se.dat,sha256=cWzX95vS957wbyZvvNNrhu1QzUcSBMCH1RVy7buxSsI,72355 +babel/locale-data/se_FI.dat,sha256=ASUyHt_ysdjsBx4jb2V3d7TFnnCrvKrAYOwvYHSdvfE,46541 +babel/locale-data/se_NO.dat,sha256=9fc96ChjeOE9FGup8pr5S_zGqirgrITKl28Qz0lHu6M,644 +babel/locale-data/se_SE.dat,sha256=pVfO3I0swSWy7w8Jb0V-om95t4fWuDWB1YdE_Lr-Gf4,685 +babel/locale-data/seh.dat,sha256=LsaTKr3qRHEBZWqNElUVVxIS46jCkcyaoYn0d8lSq0A,15963 +babel/locale-data/seh_MZ.dat,sha256=0diTZF6N0YXrV0kLWl9RY-0JXNWQ3uEToZEbCt6fOq4,627 +babel/locale-data/ses.dat,sha256=dnGrI4ChN-RlvGcNMK8pcMGNgNuDzfsx8XF7RmAhz2Y,16051 +babel/locale-data/ses_ML.dat,sha256=9Ske0UaoHZbpE8jhXhk-VL3HhIhmXnsIbcNQZeO08Qk,608 +babel/locale-data/sg.dat,sha256=NLfT67esLoB5WoBHWk7E0TmROwJQ6VXil5yiqzOkUag,16688 +babel/locale-data/sg_CF.dat,sha256=7BfhzANJ38-LvzQu3NSxTdhJtQ4kTwcKQAkt_NOWHtM,607 +babel/locale-data/shi.dat,sha256=3VVluM_KYVrNoROBAAs3xUORYRV3p9-Whq9pBS-slDw,22106 
+babel/locale-data/shi_Latn.dat,sha256=uJs5g6f9GlIzLrzmtgx1rjgY9xWypEsYciU0lTEbtiY,15670 +babel/locale-data/shi_Latn_MA.dat,sha256=Erhb65aX0qGNECddJ7M8fKy2zjJlOxXSKx42GdQdf0A,608 +babel/locale-data/shi_Tfng.dat,sha256=N4AQ70s0OZV-xQ4VWLo5bGuPPWk2AxmAjNuNwvG6bIQ,965 +babel/locale-data/shi_Tfng_MA.dat,sha256=Erhb65aX0qGNECddJ7M8fKy2zjJlOxXSKx42GdQdf0A,608 +babel/locale-data/si.dat,sha256=SUEexMOM-IB__OVxxxhsDx4RAb5P3mw88ZoS-gemPTI,237479 +babel/locale-data/si_LK.dat,sha256=r7FqkzM6vXhR4XJ5rou9GYtU_PEyYXE3tZZqsLyR-mc,626 +babel/locale-data/sk.dat,sha256=HOqji-Ab_yHtmlHJtbnrTy8wAbZW0pemfjhxeZeU72o,246088 +babel/locale-data/sk_SK.dat,sha256=EOO68wv6kuaSIKyg_BNUAfCCffzGwQlG0ABrSFcDJF4,644 +babel/locale-data/sl.dat,sha256=AZ3B8_mrbyi03b28ovtymTa7G08kAijUL-XKIqdDLqM,237036 +babel/locale-data/sl_SI.dat,sha256=0JTmTrnyrjJHBBK5TeJw_c-WW0ZmLYWZxiXPS2serts,626 +babel/locale-data/smn.dat,sha256=V4oN72KuQA5RQjG6DCR2RFo4USYyOiFFLZu8hA0QZVk,42730 +babel/locale-data/smn_FI.dat,sha256=87dist-cLsNDQvOxUdqRgAuktFuar6Tts6CRw41B1k4,645 +babel/locale-data/sn.dat,sha256=DdCgeUS6dW5p-d4oye7sMvAKDG1v99WB8alp27YF_dk,23304 +babel/locale-data/sn_ZW.dat,sha256=351RLV0bhiAOnieNqRyHN3oqG-v2GvJsYUSqXm1wHaU,626 +babel/locale-data/so.dat,sha256=M7lVxEpfLSVGo07ZxLvnLWPGwZTmzT3N2mKWLvhS2iY,160943 +babel/locale-data/so_DJ.dat,sha256=4nw-1K0M7naoWcZ63Gars4xqbghDMiSHbZpXIbPP6OQ,647 +babel/locale-data/so_ET.dat,sha256=fsS5yO4V8ciQMRZwZ6NDWcQBQFCxe1SnVgH6BUurHSE,646 +babel/locale-data/so_KE.dat,sha256=E6NS6cdOmMJTOUpVuwgBE24tiDpcC5IOw_JZhkMjbd8,1199 +babel/locale-data/so_SO.dat,sha256=AFtTqshg30FEAb50Q2lAC39KETj0xcSxc2DdMegt2zQ,607 +babel/locale-data/sq.dat,sha256=dU1WRBIEsS2Lg6f4mGs_Rc0-PjyrQO1syYMLcaVM3gk,173926 +babel/locale-data/sq_AL.dat,sha256=5SVT0s2J5NyvKwdqzWW1S2PIj33PVVz3pWw248jn17c,626 +babel/locale-data/sq_MK.dat,sha256=PsaPJulWBIFlvRKlnhK0he5lZm_AXVYmeyCq53YQcAI,1199 +babel/locale-data/sq_XK.dat,sha256=d_afvPzsozxIT1_MAI-fK_-V8nHTN07NmyhuBwQAdVo,1178 +babel/locale-data/sr.dat,sha256=Ki0eLxOs1OAdQX-0Vr0rSnWC67af2gDpGeXr7OuSEWM,277049 +babel/locale-data/sr_Cyrl.dat,sha256=0n_px-LQ4ZndDQXL0o_mRHgVxLFeYLNjn5MGOovuOoY,1978 +babel/locale-data/sr_Cyrl_BA.dat,sha256=kKuyKTEZCb1Gja2U90yXFFfUDBAKW_ErHKjdHuJ1ZN0,3930 +babel/locale-data/sr_Cyrl_ME.dat,sha256=gKr7GFHjcxb7Sq9uINfs8VEP2y7MNh1pJ2h2VGqblM8,3724 +babel/locale-data/sr_Cyrl_RS.dat,sha256=2TmzkiJSNDpNIij3yJ8nDhrKRNPUV1XX1_RXOKaGvAw,626 +babel/locale-data/sr_Cyrl_XK.dat,sha256=43d0NPoc03YrzZBie2_7DLy5oScODx-cFOK5e9hOMxQ,2607 +babel/locale-data/sr_Latn.dat,sha256=-jvRgfFrHOe4yU5pAbwigibdz7b_Wef6VzoQn3t86S4,230271 +babel/locale-data/sr_Latn_BA.dat,sha256=egRvjBkcg2d2Zm_ehAqKrixzVpD9EtOws6JVnbVusc0,4195 +babel/locale-data/sr_Latn_ME.dat,sha256=tnFSZ7uoBD6mSZL32MvharPQR1QL_oV_XaJ38bc4gbE,3579 +babel/locale-data/sr_Latn_RS.dat,sha256=6mUxn3NaqRzjHSNgJA_xzJCefSUxqXSUEsoMJ-juYUI,626 +babel/locale-data/sr_Latn_XK.dat,sha256=6BLGB40r1xCprUQ_rR6TVqubuxmhQAUKyEJa5XWAGs0,2693 +babel/locale-data/sv.dat,sha256=TZH0cY_a8DJK5BAZLOPgACkWLLwFkf5swjFulbKwJTQ,213436 +babel/locale-data/sv_AX.dat,sha256=qxZlBDrxLtr7jugZ2tsY6liGF9U84UyZIT7Ap8eL7iA,644 +babel/locale-data/sv_FI.dat,sha256=aV9hIsn7qoQ6nGzCbnJpiiPRSYFOq0yogn8ujMenwqU,1391 +babel/locale-data/sv_SE.dat,sha256=HUamjd52mLuY8GbVgBeijJpc--Z3dnARh2lcE4QvUYw,644 +babel/locale-data/sw.dat,sha256=OBKVQn-9PlGNBo89Ms1lDlr2yRQ8gX_1lUMDn1bhzMw,175523 +babel/locale-data/sw_CD.dat,sha256=dsCHY8aKyhmiRIIiStPfM3RK7oyvdUUYXY94Or28t28,2698 +babel/locale-data/sw_KE.dat,sha256=o7UlPWH8XMknIL_QFrQHFYzS7KRh9f0SFyekUCJZ_kE,4051 
+babel/locale-data/sw_TZ.dat,sha256=igleCvk2ZHZM9NoYRtnSd1s-oxuQjL1wLRvD_ZLBovo,607 +babel/locale-data/sw_UG.dat,sha256=SmNCoGRulbvTy-uR4zcKFkELWxnfZRHTFQQCkMzL99g,628 +babel/locale-data/ta.dat,sha256=hIoFhN-PxAoTw0W7Qiy0UXsD-qxIoiNeV-9jNuHr5Mk,256586 +babel/locale-data/ta_IN.dat,sha256=U63SpEBo7jY11wGhZNMFeC3pyS0ExnmbUlmDjgOeuuo,649 +babel/locale-data/ta_LK.dat,sha256=jTOsLhe3x-1CXvk6IDUuiEwrc0OvqkmsJmZjpQgDbpw,1199 +babel/locale-data/ta_MY.dat,sha256=eO3c5Qh0zUMlQeFZgQSzQedaSz7mLTFlVDKR1Z-2VH0,1256 +babel/locale-data/ta_SG.dat,sha256=1MxGlD45nwX3TAvpVESvP4CXrmxgqySQjeZjzH4Cofo,1275 +babel/locale-data/te.dat,sha256=lPRz8YpMlzuXt_B5br8V3dYjMFTZ6metR2KM0xCF42Y,255657 +babel/locale-data/te_IN.dat,sha256=nRtR37nSryged20zk3SQMpmOf-UuNFjL_2YFTMpyN7k,649 +babel/locale-data/teo.dat,sha256=XiaLo3ISyEZi89ypqEUw0Yom5tknINNoJzne6gtXQio,16723 +babel/locale-data/teo_KE.dat,sha256=adkRA-mbnesgQLGfXEqwmXSVQDi63TQyosUzSLt3Xu4,648 +babel/locale-data/teo_UG.dat,sha256=jfrgUMX_AeETJJFHJAx4dXi9nXjywSwr6tZUQksea7c,608 +babel/locale-data/tg.dat,sha256=mjaUWm9mGHcYYsk5vmlrFj9zTXf5WcJMRDGex2axIB4,29662 +babel/locale-data/tg_TJ.dat,sha256=DvdJs-nLilO1-m1YgIOjEeY_f0Viv5FYjZ9VR8O6BAc,626 +babel/locale-data/th.dat,sha256=hj2O37fbmx_Nb49pOFF2oniumaxVDlkOA5ULcI9hD8o,230260 +babel/locale-data/th_TH.dat,sha256=msHIvTxB6gWoJP5-ClkGH_IedRgXpRQzZPj0oIyVkyU,626 +babel/locale-data/ti.dat,sha256=l6soKgfd9j_zCwP2oOKDdY-P-8FUul0u8WvGiAW1VZ8,73057 +babel/locale-data/ti_ER.dat,sha256=9n_DeoDAeCwpjxww70RaNjQFwSY0dAOwwdAxYhVhw_Q,976 +babel/locale-data/ti_ET.dat,sha256=uo6aLFMfTxKNSCagtB8OrTa99SUAmiNUA1txEd0tHtE,626 +babel/locale-data/tk.dat,sha256=TcKx03xQoOnnIbGUZGszP3MDMoy3Ncaq6ALq2j93j5w,165033 +babel/locale-data/tk_TM.dat,sha256=te9O_n9WobfEaJnREwyPHVH4EMHcvdClBXZVPDWXKqA,626 +babel/locale-data/to.dat,sha256=3Pl69HQ7EZkoRYPF_uD8XCinhZeYGlQLeYGRp8sLXco,155039 +babel/locale-data/to_TO.dat,sha256=4uG_hh9oRlbSUKvlRBuU4uKaGSSs8WXIdQ6oPE9nsXw,607 +babel/locale-data/tr.dat,sha256=bwVttS8wUkqEjHqWYckkC3mIBMvlR204MbbKx2RTxUk,199237 +babel/locale-data/tr_CY.dat,sha256=O7i548Pkn7b38K67zCSjikwvr3fqUzgQFtw85HwftvE,1202 +babel/locale-data/tr_TR.dat,sha256=60u8smbVf6BBW48PI-x3EpHvJ8tsbUSbJHhCqFwqEWY,626 +babel/locale-data/tt.dat,sha256=_FR8BBZRdW9-Oio2iafGcu48Z9FtldL05h1qzhmM1OA,33295 +babel/locale-data/tt_RU.dat,sha256=KMOS9m6E-8xqJQZQghSnhQdNjkNVu0dXsQkFwnfl7lM,644 +babel/locale-data/twq.dat,sha256=OEwG2SXr9fEeQCMaHxjvKWlWLVtUhGamoG4qG6wjUmo,16224 +babel/locale-data/twq_NE.dat,sha256=qI3KClszp7x1J6kAz3_warLhjR4nJfJo_KfvlGfrnLo,608 +babel/locale-data/tzm.dat,sha256=ZYs6PgmbBXaPk6WRnQxKT5wzR0RzqXFPa12OWoO7dUc,16200 +babel/locale-data/tzm_MA.dat,sha256=Iju1azWHoAUAq4eKXDIeqAfAEz4HrOo7iUtyrOvSpl4,608 +babel/locale-data/ug.dat,sha256=TfYKG15NTf4fouUcNaqdusAKRxa1RISFoA8RYi4LN5s,128365 +babel/locale-data/ug_CN.dat,sha256=U8dQkbFRAbqdAKISnFUw0FNUPmzrkJdLpK4n_14alKk,626 +babel/locale-data/uk.dat,sha256=vdV4W3lJEEENgT6Z37OtNlhQPDq8sVw7nWzL3hCidSo,307335 +babel/locale-data/uk_UA.dat,sha256=aG-RNa11ss0qW7vF50ajYL0qAxtKC-2ghG-P4W6vI8g,626 +babel/locale-data/ur.dat,sha256=rw0vgCWeX-JSUbVwTUGvRddcKePa_yrnoFDb-_2SoW4,192380 +babel/locale-data/ur_IN.dat,sha256=R-Q6ZyVoQOPtQr-7izba7emv8X589nguoMF477kk7-k,12616 +babel/locale-data/ur_PK.dat,sha256=pjqQbfrTsrzcFl30FILINl2ELO3fBxiaRBhCr-KxWlE,626 +babel/locale-data/uz.dat,sha256=9hnTiZ-2Ug_qye8uaQ_y6yORtJx6ufmXVo7yXEeRXv4,167959 +babel/locale-data/uz_Arab.dat,sha256=5iCqhCA86006i6CS3bLMZ9IsxWlQoPmQKFbD5pRUJO4,4135 +babel/locale-data/uz_Arab_AF.dat,sha256=Sx_uYNKZPq-kRomMbkZ9n8QRHjADxn9Jnga6fTVCn2s,669 
+babel/locale-data/uz_Cyrl.dat,sha256=GYkeuqND_cGOn_PWn6yDG-GJl-739xKvN51DjAFRRKs,98765 +babel/locale-data/uz_Cyrl_UZ.dat,sha256=3wc5_aiBB_CAeYu5YRxPxZHUd0sZ4nIXafgKFDygfZs,626 +babel/locale-data/uz_Latn.dat,sha256=U7k5KfSvCsFk2Atujg4-iVDmGikwr-vkjPTpb0D_vP8,1283 +babel/locale-data/uz_Latn_UZ.dat,sha256=3aYJK9kmVFmLlDvDgL1ndruFKWOqVlHHJjag1NqV1a4,626 +babel/locale-data/vai.dat,sha256=RqFFCMxtJ670vHHKuitqD_dDlWU2PBHYjgw-9yA8fTE,19046 +babel/locale-data/vai_Latn.dat,sha256=X4-xL0o7OgV8SCyY9w5JPbtwU24MfloL1z1fS9FGL_A,15311 +babel/locale-data/vai_Latn_LR.dat,sha256=wILwRiJahBELmxou0KnRkVMZReFA3tiFJzmb_OqZhNc,608 +babel/locale-data/vai_Vaii.dat,sha256=HR8PB6naRA8D96Yn5Iy8ODPw8CLzpfrVoXyXd0apmLM,684 +babel/locale-data/vai_Vaii_LR.dat,sha256=wILwRiJahBELmxou0KnRkVMZReFA3tiFJzmb_OqZhNc,608 +babel/locale-data/vi.dat,sha256=q1bLXzF2MlouEux5Mf1jmX1aHrQ_ymF1F6o4hOSmCJY,163315 +babel/locale-data/vi_VN.dat,sha256=ysK3bxwU2cv3FiZx5XYBq5kGCnsRAIYXefasVKTIKXE,626 +babel/locale-data/vo.dat,sha256=oGgNNbnkX_qhkH1hjxpt0molwl0N68bFK4mP0Yh2k7o,5246 +babel/locale-data/vo_001.dat,sha256=QsXTJYVDrzM24v2rKaWPgtSSN54Ege4iHGzzdxCElg0,841 +babel/locale-data/vun.dat,sha256=n04FmbzLYMYEmZaOUMxpxLNF-v6OeB_RtLVQH6_vOc8,16119 +babel/locale-data/vun_TZ.dat,sha256=0sIAdc6gRGhTbpGXbPkAwTzO7g-QBG0WdKsmjvFSxY0,608 +babel/locale-data/wae.dat,sha256=qRzR0haJ9sG5gyquuNV8SZXHdSL-Ku8F0j9OGnYsIDE,28712 +babel/locale-data/wae_CH.dat,sha256=7C-tFbrZbBSHRUeTh5pVAS-8twkecVsqzHHXfE5BJ-w,645 +babel/locale-data/wo.dat,sha256=Wbq-Hjgk2y_GIAVbiGK7oT7Cdewbl20WN8w71cN75L4,25738 +babel/locale-data/wo_SN.dat,sha256=SFe418ytRv4n5qYdd7PB5Z7wW2lMXKLaOZ6iTb1dTw4,607 +babel/locale-data/xh.dat,sha256=aizGIMM4WRNrhmWoZSfTGjT2zPbTxyrZYokFCwFDYFI,15076 +babel/locale-data/xh_ZA.dat,sha256=Si4fXBHGmX5L_vioWN-a_PowYRePLdkfsBgvfSIN_7c,626 +babel/locale-data/xog.dat,sha256=MgoUNE6v6h13pdogvhGzpyc45PJlCUuLzk_xlQVSHFE,16608 +babel/locale-data/xog_UG.dat,sha256=7Ft3wGj-9xqf-9AUUQlteuqRqcQKdmYx69SapgOjZCE,608 +babel/locale-data/yav.dat,sha256=TgY99ASgQyjnMOCW4N2x3xonlWPNekJvOudZ53b325Q,15354 +babel/locale-data/yav_CM.dat,sha256=7Zh5rEYu5q9jspiu_9pbQ3xewmLg_my-9Rt7rqOELnE,627 +babel/locale-data/yi.dat,sha256=8oRirXWekzHxj00Uer9bzo08HWKIKAAS5MBFjfuipw4,30403 +babel/locale-data/yi_001.dat,sha256=7Xf8CLNU0_JtRoWjwRdeOMQUxuCOIfMn6JgtuU3y2yk,903 +babel/locale-data/yo.dat,sha256=ZufCWXkkr0h2lFvqXBzIrSTAfUOYV1aWOo_0-dgEr8Y,36073 +babel/locale-data/yo_BJ.dat,sha256=v7sGbQbpNvx_925zeSHoAtlRlGj7aZuZ1Q-REvqaL9s,35277 +babel/locale-data/yo_NG.dat,sha256=9XY2uokNDXCH8mbxl2lIRWAr0h39yAteR52zoOq7P7g,607 +babel/locale-data/yue.dat,sha256=kDR-_CW4CAqqV-76GqPuvezZ55XloDm1cq9ckhoqPrc,176622 +babel/locale-data/yue_Hans.dat,sha256=_GkNxpR7Ako3sJcHGfBiWl6aOswYZCGImDsM2Aj-BZ0,176532 +babel/locale-data/yue_Hans_CN.dat,sha256=FzQmGCVsYnueDspYNGYj_DXHmMt_HLYG5gGmRFUHyT4,627 +babel/locale-data/yue_Hant.dat,sha256=Na0Odc_YNhhVKjF2-IECRD83MTyW5JxDa214bVwKeSE,1297 +babel/locale-data/yue_Hant_HK.dat,sha256=Ec0YPHv508DXmz4WTZyRXL3o1Vzt4g9aNtLy2QEC0ko,627 +babel/locale-data/zgh.dat,sha256=D4nbMm0USubx9XKDP5MCvIe0QjMKsTWvZC_5EWOAYkQ,30568 +babel/locale-data/zgh_MA.dat,sha256=jC43Fo4jRYvZlJw8YHkxMp397MnNJcV98LTJNXom-kQ,608 +babel/locale-data/zh.dat,sha256=m43JIyhOjoSliRYPlfNr1BtwGB0weDIyhj8fgbX5iCs,178476 +babel/locale-data/zh_Hans.dat,sha256=GjCjyfNKG8X93jBUM1Kz2xyOzaRo9c7d-wNjUxMQtC4,1296 +babel/locale-data/zh_Hans_CN.dat,sha256=p5jtbCp3q1z8RyhHQbdEYtxxHeJZjE6hPHCZeaKuY18,626 +babel/locale-data/zh_Hans_HK.dat,sha256=HFh5Iu49qjRP-aGP4Drj2Nd-afmy_nXurl2mz5CrsgE,4321 
+babel/locale-data/zh_Hans_MO.dat,sha256=4u8prMqf76bZEGZWmnttt8rFnOG9GQ9TJ-qvhDJMBV4,3266 +babel/locale-data/zh_Hans_SG.dat,sha256=_d4ULzZoQycECN9bjIaWM7QT6NeqppuqZ_2J77SxEa8,3462 +babel/locale-data/zh_Hant.dat,sha256=J-_FNz7Imicihh7kKd6t-AiB7JFNO_Et9D4L5BZL5GM,182449 +babel/locale-data/zh_Hant_HK.dat,sha256=gna8uBk3yehPbTncX4g-xu_PKtqIx0m6wab3OFkocCo,54211 +babel/locale-data/zh_Hant_MO.dat,sha256=ZAm_SZd9QU5d9yIpBY7zkXjftOuUsxO5n76GBC7PEr0,648 +babel/locale-data/zh_Hant_TW.dat,sha256=HbzN_NAwhpX9xJ2wB2lCg-DcBsiCuP7lp3RK6xNtM58,626 +babel/locale-data/zu.dat,sha256=Tk8uVFm5e3BT0K4TcTCBmh-KVbrfpP4EiUxAb97G5-M,170194 +babel/locale-data/zu_ZA.dat,sha256=_bcquTfNmZuNRLAfnjIPVwShwnBdealyaQtkfbJB5bI,626 +babel/localedata.py,sha256=YRA37osx1uFriIA6xsV4PH73pzZIRKAGC3DMAoiAhbY,7317 +babel/localtime/__init__.py,sha256=97IqJD5FNnhre8jiYaEisp7PeWaBMhsb8EH0Gd2q5bM,1721 +babel/localtime/__pycache__/__init__.cpython-37.pyc,, +babel/localtime/__pycache__/_unix.cpython-37.pyc,, +babel/localtime/__pycache__/_win32.cpython-37.pyc,, +babel/localtime/_unix.py,sha256=P66o3ErKXzhFvj3e3Qk6MBS7AR0qsDqSQclIAMHKp18,4801 +babel/localtime/_win32.py,sha256=dGzhQ8AlY5iItSd-i3Fi2O3YWuVJ83PFSWe7EG2BaBg,3086 +babel/messages/__init__.py,sha256=FslIS7Co5VK7Ec4g44kFO7m7zWw2-fQuu4gvTzqeIrk,254 +babel/messages/__pycache__/__init__.cpython-37.pyc,, +babel/messages/__pycache__/catalog.cpython-37.pyc,, +babel/messages/__pycache__/checkers.cpython-37.pyc,, +babel/messages/__pycache__/extract.cpython-37.pyc,, +babel/messages/__pycache__/frontend.cpython-37.pyc,, +babel/messages/__pycache__/jslexer.cpython-37.pyc,, +babel/messages/__pycache__/mofile.cpython-37.pyc,, +babel/messages/__pycache__/plurals.cpython-37.pyc,, +babel/messages/__pycache__/pofile.cpython-37.pyc,, +babel/messages/catalog.py,sha256=kSj7z8QcQblXLvwyEC5AsuXZ1PnHEyWpBM49X0jUNqQ,32296 +babel/messages/checkers.py,sha256=KwSkPIg3JJyjvhxkMOAAZFUs5ZQNae_wnMKRAYP6sis,6085 +babel/messages/extract.py,sha256=E3mIosZGF6wNcVyylNIDbm1osgJsgRcOMGJnYNVLmus,26428 +babel/messages/frontend.py,sha256=xN1sO2Y3Qp4FGj94vQToev3HeAzmFK5knWKtRzVpnzo,38729 +babel/messages/jslexer.py,sha256=81Cun16nkMdbML2NAxixUls1fr2FHaW_-Uqju0wrL7s,6334 +babel/messages/mofile.py,sha256=Ry-YGbadmabyB2I0WC8lGSBwLgYPXWOChzRILtcjs4A,7204 +babel/messages/plurals.py,sha256=fXgXJ9kTllO0OjqQ-mqrpXW2lXNY1FTa9UrGrlaGBuc,7206 +babel/messages/pofile.py,sha256=tGdqHM3tDSyDc6CFw9RwHDxCMQTEgacooKREqUnJBuk,21795 +babel/numbers.py,sha256=wa9bdJvj5PZvM1wX7rzd4hdp3i6mx8nlk6et6aQKTF8,38551 +babel/plural.py,sha256=rI02SVvzNQNniSh6TjfWIdwONJ3cE8JRS-V43P-KlC4,21314 +babel/support.py,sha256=MSiTpW8BHfYAZMF6zfkfcv_pReGlmxDbAdNu96DzFjM,22303 +babel/units.py,sha256=0Sl-FFQTRK36UMlFDZyDuTgRm1C8gT8SK02P3ViAr5M,11105 +babel/util.py,sha256=JRSyuEezoLBMDDpIGrsgoV9bRTyOOS9CZEVppRbUjWo,7582 diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/WHEEL b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/WHEEL new file mode 100644 index 0000000..c8240f0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/entry_points.txt b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/entry_points.txt new file mode 100644 index 0000000..18c3a58 --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/entry_points.txt @@ -0,0 +1,22 @@ + + [console_scripts] + pybabel = 
babel.messages.frontend:main + + [distutils.commands] + compile_catalog = babel.messages.frontend:compile_catalog + extract_messages = babel.messages.frontend:extract_messages + init_catalog = babel.messages.frontend:init_catalog + update_catalog = babel.messages.frontend:update_catalog + + [distutils.setup_keywords] + message_extractors = babel.messages.frontend:check_message_extractors + + [babel.checkers] + num_plurals = babel.messages.checkers:num_plurals + python_format = babel.messages.checkers:python_format + + [babel.extractors] + ignore = babel.messages.extract:extract_nothing + python = babel.messages.extract:extract_python + javascript = babel.messages.extract:extract_javascript + \ No newline at end of file diff --git a/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/top_level.txt new file mode 100644 index 0000000..98f6593 --- /dev/null +++ b/venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +babel diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/AUTHORS b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/AUTHORS new file mode 100644 index 0000000..a6212c2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/AUTHORS @@ -0,0 +1,41 @@ +GitPython was originally written by Michael Trier. +GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich. + +Contributors are: + +-Michael Trier +-Alan Briolat +-Florian Apolloner +-David Aguilar +-Jelmer Vernooij +-Steve Frécinaux +-Kai Lautaportti +-Paul Sowden +-Sebastian Thiel +-Jonathan Chu +-Vincent Driessen +-Phil Elson +-Bernard `Guyzmo` Pratz +-Timothy B. Hartman +-Konstantin Popov +-Peter Jones +-Anson Mansfield +-Ken Odegard +-Alexis Horgix Chotard +-Piotr Babij +-Mikuláš Poul +-Charles Bouchard-Légaré +-Yaroslav Halchenko +-Tim Swast +-William Luc Ritchie +-David Host +-A. Jesse Jiryu Davis +-Steven Whitman +-Stefan Stancu +-César Izurieta +-Arthur Milchior +-Anil Khatri +-JJ Graham +-Ben Thayer + +Portions derived from other open source works and are clearly marked. diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/LICENSE b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/LICENSE new file mode 100644 index 0000000..5a9a6f8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/LICENSE @@ -0,0 +1,30 @@ +Copyright (C) 2008, 2009 Michael Trier and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of the GitPython project nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/METADATA b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/METADATA new file mode 100644 index 0000000..7341d8c --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/METADATA @@ -0,0 +1,29 @@ +Metadata-Version: 2.1 +Name: GitPython +Version: 3.0.5 +Summary: Python Git Library +Home-page: https://github.com/gitpython-developers/GitPython +Author: Sebastian Thiel, Michael Trier +Author-email: byronimo@gmail.com, mtrier@gmail.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=3.0, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Requires-Dist: gitdb2 (>=2.0.0) + +GitPython is a python library used to interact with Git repositories + + diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/RECORD b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/RECORD new file mode 100644 index 0000000..f87c014 --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/RECORD @@ -0,0 +1,207 @@ +GitPython-3.0.5.dist-info/AUTHORS,sha256=QleDRpZmrngZXrZxZVPInx1CGC6WBs2wgquWA4u1R48,1645 +GitPython-3.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +GitPython-3.0.5.dist-info/LICENSE,sha256=_WV__CzvY9JceMq3gI1BTdA6KC5jiTSR_RHDL5i-Z_s,1521 +GitPython-3.0.5.dist-info/METADATA,sha256=Dh4IlJPr-rEGh3U-aLCdO8p-myboJXkcU6MX0SJWAsI,1094 +GitPython-3.0.5.dist-info/RECORD,, +GitPython-3.0.5.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92 +GitPython-3.0.5.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4 +git/__init__.py,sha256=8bGCzvbncwo7QrZPEkY8mozm-JTkrGaNlSMCS-pFWHk,2334 +git/__pycache__/__init__.cpython-37.pyc,, +git/__pycache__/cmd.cpython-37.pyc,, +git/__pycache__/compat.cpython-37.pyc,, +git/__pycache__/config.cpython-37.pyc,, +git/__pycache__/db.cpython-37.pyc,, +git/__pycache__/diff.cpython-37.pyc,, +git/__pycache__/exc.cpython-37.pyc,, +git/__pycache__/remote.cpython-37.pyc,, +git/__pycache__/util.cpython-37.pyc,, 
+git/cmd.py,sha256=gZn9tM9Oy6Xao8O_Q6_xOlhYYhzU5dagxQ4DA5Cwyk0,42869 +git/compat.py,sha256=g00yP8eq-ap2IOXXo6mppHd-4Iw8q-d6tDvQegj5UYw,9436 +git/config.py,sha256=HJnL92g8GmpwNgH2LOkV3Z1eRK3KGgOh-gbGmHP4fiQ,28415 +git/db.py,sha256=bhsP6H_7pAnm5hUBKod8ZguYrmYMB2L4RgkNswiMgEo,1963 +git/diff.py,sha256=QaI2R3uyB0k9YOuBTOmzdIF7Om9iWgEzHtmJVRQJ7Xc,19965 +git/exc.py,sha256=j09XYwbEitorNgZNeNvsCnroQ-IUUM2f35rO2Hy8Myw,4907 +git/index/__init__.py,sha256=Wj5zgJggZkEXueEDXdijxXahzxhextC08k70n0lHRN0,129 +git/index/__pycache__/__init__.cpython-37.pyc,, +git/index/__pycache__/base.cpython-37.pyc,, +git/index/__pycache__/fun.cpython-37.pyc,, +git/index/__pycache__/typ.cpython-37.pyc,, +git/index/__pycache__/util.cpython-37.pyc,, +git/index/base.py,sha256=jkb965u7XrZ4rJFwUqbpfX2vx5AURcdt48KLag1NzKU,52209 +git/index/fun.py,sha256=EmS1szuhLthtoVw9fJZf2QKyiXAbcgZjM9ghfxfUgoI,14280 +git/index/typ.py,sha256=GLBZbDS3yScHJs0U18CX-heLTDjjGu6fN7T2L_NQr4A,4976 +git/index/util.py,sha256=l6oh9_1KU1v5GQdpxqCOqs6WLt5xN1uWvkVHQqcCToA,2902 +git/objects/__init__.py,sha256=6C02LlMygiFwTYtncz3GxEQfzHZr2WvUId0fnJ8HfLo,683 +git/objects/__pycache__/__init__.cpython-37.pyc,, +git/objects/__pycache__/base.cpython-37.pyc,, +git/objects/__pycache__/blob.cpython-37.pyc,, +git/objects/__pycache__/commit.cpython-37.pyc,, +git/objects/__pycache__/fun.cpython-37.pyc,, +git/objects/__pycache__/tag.cpython-37.pyc,, +git/objects/__pycache__/tree.cpython-37.pyc,, +git/objects/__pycache__/util.cpython-37.pyc,, +git/objects/base.py,sha256=UZiyzyzx4_OJ3bWnwqb3mqh0LXT7oo0biYaTm-sLuAw,6689 +git/objects/blob.py,sha256=evI3ptPmlln6gLpoQRvbIKjK4v59nT8ipd1vk1dGYtc,927 +git/objects/commit.py,sha256=-4Wn1dvM1EM6Su_NNZH-FKdyBtXM18jK4TUORApnnos,20761 +git/objects/fun.py,sha256=UKFhCINLA4X7YjnGH1hxF0Uj2sHsKbxFDd3rrQmRq6U,7352 +git/objects/submodule/__init__.py,sha256=OsMeiex7cG6ev2f35IaJ5csH-eXchSoNKCt4HXUG5Ws,93 +git/objects/submodule/__pycache__/__init__.cpython-37.pyc,, +git/objects/submodule/__pycache__/base.cpython-37.pyc,, +git/objects/submodule/__pycache__/root.cpython-37.pyc,, +git/objects/submodule/__pycache__/util.cpython-37.pyc,, +git/objects/submodule/base.py,sha256=s8H54844tc09U76G3-2lM2X3A518AGyVTD1rvEh81tM,53875 +git/objects/submodule/root.py,sha256=N2i0PjRcw5bNLLIDAkviQjXhf9RvGSfVnbav4FNzkXo,17656 +git/objects/submodule/util.py,sha256=VdgIG-cBo47b_7JcolAvjWaIMU0X5oImLjJ4wluc_iw,2745 +git/objects/tag.py,sha256=OFeN6ZkLU5zVTz_1xuPNpz4YoEgpSpVZ1MMJnnUyGqE,3127 +git/objects/tree.py,sha256=Ta1qAkuwzn7lk54_d7knqF2WL6DOc2MQG1k8mKLel1s,11069 +git/objects/util.py,sha256=fmukvCi3HUjD9fE8AtNgn0qhsImKCJGZ5sdEwGiM358,12451 +git/refs/__init__.py,sha256=3CRfAyE-Z78rJ3kSdKR1PNiXHEjHLw2VkU2JyDviNDU,242 +git/refs/__pycache__/__init__.cpython-37.pyc,, +git/refs/__pycache__/head.cpython-37.pyc,, +git/refs/__pycache__/log.cpython-37.pyc,, +git/refs/__pycache__/reference.cpython-37.pyc,, +git/refs/__pycache__/remote.cpython-37.pyc,, +git/refs/__pycache__/symbolic.cpython-37.pyc,, +git/refs/__pycache__/tag.cpython-37.pyc,, +git/refs/head.py,sha256=KY_-Hgm3JDJParX380zxQv5-slxtTNnUE8xs--8nt9U,8706 +git/refs/log.py,sha256=NI8RndjtjKzOoqo2hx_ThSQ1lt0trHMgJYW_1ML62_E,10918 +git/refs/reference.py,sha256=OcQMwHJuelR1yKe1EF0IBfxeQZYv2kf0xunNSVwZV-M,4408 +git/refs/remote.py,sha256=6JOyIurnomM3tNXdKRXfMK_V75gJNgr9_2sdevKU_tI,1670 +git/refs/symbolic.py,sha256=TtPRNbt1dnki-_TAjAn3gP_h9Ixgba7z0rWcy7_WbQ8,26840 +git/refs/tag.py,sha256=qoHwJ9suHx8u8NNg-6GvNftK36RnCNkpElRjh2r9wcI,2964 +git/remote.py,sha256=aJsDcJwqGd3iqGBmDPrWzAlHMRYG1vUAnhjYS3J-k8k,35739 
+git/repo/__init__.py,sha256=ssUH4IVCoua5shI5h1l46P0X1kp82ydxVcH3PIVCnzg,108 +git/repo/__pycache__/__init__.cpython-37.pyc,, +git/repo/__pycache__/base.cpython-37.pyc,, +git/repo/__pycache__/fun.cpython-37.pyc,, +git/repo/base.py,sha256=9h61NMN4IK4-kUBEmavZHlzO5fkJrux19YORfyeA8xs,44515 +git/repo/fun.py,sha256=SuguBZs4sZE_SvAcfvn7yxXdoxKmgQdwUhgKAkeyISQ,11396 +git/test/__init__.py,sha256=q-WCITGqFKTHnRFjUvJz5hUJBi8SP4InaAZRXZ8qj8k,220 +git/test/__pycache__/__init__.cpython-37.pyc,, +git/test/__pycache__/test_actor.cpython-37.pyc,, +git/test/__pycache__/test_base.cpython-37.pyc,, +git/test/__pycache__/test_blob.cpython-37.pyc,, +git/test/__pycache__/test_commit.cpython-37.pyc,, +git/test/__pycache__/test_config.cpython-37.pyc,, +git/test/__pycache__/test_db.cpython-37.pyc,, +git/test/__pycache__/test_diff.cpython-37.pyc,, +git/test/__pycache__/test_docs.cpython-37.pyc,, +git/test/__pycache__/test_exc.cpython-37.pyc,, +git/test/__pycache__/test_fun.cpython-37.pyc,, +git/test/__pycache__/test_git.cpython-37.pyc,, +git/test/__pycache__/test_index.cpython-37.pyc,, +git/test/__pycache__/test_reflog.cpython-37.pyc,, +git/test/__pycache__/test_refs.cpython-37.pyc,, +git/test/__pycache__/test_remote.cpython-37.pyc,, +git/test/__pycache__/test_repo.cpython-37.pyc,, +git/test/__pycache__/test_stats.cpython-37.pyc,, +git/test/__pycache__/test_submodule.cpython-37.pyc,, +git/test/__pycache__/test_tree.cpython-37.pyc,, +git/test/__pycache__/test_util.cpython-37.pyc,, +git/test/fixtures/__pycache__/cat_file.cpython-37.pyc,, +git/test/fixtures/blame,sha256=4EDRSXdgbRtxHU_2lASFXC7eNShL2cVq3IU43tLWlD4,3663 +git/test/fixtures/blame_binary,sha256=YLzoHqTAuv2Uv8IILh4ndQxJ_A1c09176E-3d5FMQsM,14807 +git/test/fixtures/blame_complex_revision,sha256=tPguLsqmLxjuZWg5nRcdZCZeaBi-LOeVQEHfTX6X_B0,7645 +git/test/fixtures/blame_incremental,sha256=3VXtrk8LVqfS5f2vsP5DTzFU3opeevUbENQUq22vTdw,982 +git/test/fixtures/blame_incremental_2.11.1_plus,sha256=JDA_xCevOrOMDeKW-U8svYeA0E8Pa3sI7G8GALpxOHw,1154 +git/test/fixtures/cat_file.py,sha256=7RDIymGyByw8I1OibenXM-DVsZ0_7gpazeYYG4C5GDM,136 +git/test/fixtures/cat_file_blob,sha256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw,11 +git/test/fixtures/cat_file_blob_nl,sha256=GJShnIW6FTrL90OsTkP8AEyJFgSyb4xp4eg-oq_HxI8,12 +git/test/fixtures/cat_file_blob_size,sha256=JdTyqG3rXiV0uzIQtnuyT8xK-xn5OntloFfaqHSp0Y4,3 +git/test/fixtures/commit_invalid_data,sha256=QlV-Pw5mw1Vhp6qivAQY5kcBP_BMJ_OIdLCinmes5Sw,242 +git/test/fixtures/commit_with_gpgsig,sha256=3in_tJPkQv2K1wFx-PGqaCZQe40liMnl9cMYOJ8krTA,1387 +git/test/fixtures/diff_2,sha256=sxE-xkV5lQrUEbpllp2X_AcFfPUmUr2wvSsc9qkZQLc,1994 +git/test/fixtures/diff_2f,sha256=na11T8R1dhJUOKeO-fEeHymOxhXNrjvzzmA_r7x6oJM,732 +git/test/fixtures/diff_abbrev-40_full-index_M_raw_no-color,sha256=AW-YEfutyH_RVyaP2nCTPhtjvkfqWi7NVL4s9Ab3Qww,109 +git/test/fixtures/diff_change_in_type,sha256=Wo1iCaT1YBfGn5ZSJ40H7iVeqXKm-v-qJnsBUBKrpsI,319 +git/test/fixtures/diff_change_in_type_raw,sha256=67KYtwIlQdTSwesABnIYTZxFgiwPhVyBXaDFoPXRFt4,108 +git/test/fixtures/diff_copied_mode,sha256=rzKjhxG_HWuzMJAuGlVS6RKYV6g7Ko8lhc1CdwxGj-g,95 +git/test/fixtures/diff_copied_mode_raw,sha256=dssv9FuXzR_-urJrkaZkBySxHosrGMyna4TxjVsOl-k,122 +git/test/fixtures/diff_f,sha256=sNsG26bYvqU4pK_RwahaO-Lya8O9Xonwlyth8do_ptY,504 +git/test/fixtures/diff_file_with_spaces,sha256=BOvQkq4AjQ_cR1e0iLYDQdNq2BLa-P5xhI4Xal7hYcE,216 +git/test/fixtures/diff_i,sha256=792rEQvP9Q-MNxZ3_FsvhG5emE_q1nT9jpmQ_A1hFWE,5705 +git/test/fixtures/diff_index_patch,sha256=qd9jD_eAQY5I9OLsbqdz3-lm_ncL2ALJhVLyj3enAfk,4598 
+git/test/fixtures/diff_index_raw,sha256=odNXPZQ4rlBnqYfJvvTKGS8QvfJE33WN_X-lIRMT8NI,101 +git/test/fixtures/diff_initial,sha256=1RJTg7QSTdMGlqLDvjFUhKtV0bAV2NFW8rHBgzlVfyg,76 +git/test/fixtures/diff_mode_only,sha256=pqDOHBLm09TWZ0orff-S7pCkQktD2sooW5mURG0vqLQ,46005 +git/test/fixtures/diff_new_mode,sha256=b70EDNoC_gfq_P_fVFCIqT3WHU_P0l-1jhuR2cSEJFg,546 +git/test/fixtures/diff_numstat,sha256=_Ls171vvsERXlRiJ1i1tA5vHyoYCzt3hKorFmic7UyE,22 +git/test/fixtures/diff_p,sha256=3YlhR3UNFIPDv90Zn1vCXC46kQCVDuepUZIzwzD8xmk,19273 +git/test/fixtures/diff_patch_binary,sha256=CLWigD0x0z3n_fpdh8LlkEyRUy7oDiWM-CJpGrqWPiM,155 +git/test/fixtures/diff_patch_unsafe_paths,sha256=jsc2GM8j56puEDnMEhlBHG4jIhziN0uY8cuzGTTtHmw,3145 +git/test/fixtures/diff_raw_binary,sha256=-PUPqf5wop8KkmubHnPK6RAVinlJuQf9Lqo4VBff23I,103 +git/test/fixtures/diff_rename,sha256=-f4kqw0Zt1lRZZOmt5I0w9Jenbr3PngyTH2QeUQfv8g,415 +git/test/fixtures/diff_rename_raw,sha256=VVBUjGEoXWWMYQFq-dyE708DijCnG974Qn79plVT39Q,112 +git/test/fixtures/diff_tree_numstat_root,sha256=NbBofQm3wGm-1hyz8XKIoxMtC_bzz4x8TlxxuF8LLDU,63 +git/test/fixtures/for_each_ref_with_path_component,sha256=hHVSiVHNEW5PKSPP4zFxxpYs4EYlPSJ9y-yykzkpWjk,84 +git/test/fixtures/git_config,sha256=_Igi3In2TsksvwUdn7YcusMv-069ftMdlV1G7ZCs8nU,1517 +git/test/fixtures/git_config-inc.cfg,sha256=jYjjNgfYBBkEAXYj5wLy7en-ISXbvVyOOfOmKsURYdc,92 +git/test/fixtures/git_config_global,sha256=_tFDHYTW1Hxue2WXqjafVm_b9eM-OjTV6WTD2yZ3aqM,366 +git/test/fixtures/git_config_multiple,sha256=xhyn_df95CrbWfA_YWV_Y1eR9bpbc-xZxWAnzCJTUU4,121 +git/test/fixtures/git_config_with_comments,sha256=Q9IHrB4KE3l15iXoYD9-4TIMyd_rFczQ1CPAu-CI8bU,3997 +git/test/fixtures/git_config_with_empty_value,sha256=686iisjxnex4YeT4qWdjsQh22X8UDw5yzKSerefFSTM,35 +git/test/fixtures/git_file,sha256=44Qr9_8TluxWGPiPjDT4dEyF8x3fvnA9W7moDNiFAKo,16 +git/test/fixtures/index,sha256=OBeM4XodizcBFgK_7S92fdjNTaitNxGzSBkcHXXWQvs,163616 +git/test/fixtures/index_merge,sha256=IdtRRV85gi9dGFC4LNuGrZU2yttGAAANeS0_qvNO85w,9192 +git/test/fixtures/issue-301_stderr,sha256=z6QL_UgCKQ1MMviNQNdhM22hOgp00zfJyc5LCm7Jl64,302879 +git/test/fixtures/ls_tree_a,sha256=uBvIY8-7HnaBvSsVYigYJdsbeslxrtfeXh-tWXKtOnc,429 +git/test/fixtures/ls_tree_b,sha256=pW3aIRcXMA1ZSE36049fJWeiVQl95qk_31U8Eh3Tc1c,119 +git/test/fixtures/ls_tree_commit,sha256=cOgzX5Qcqvy4LU4dIBkcc63ccrOPBLab5DsCQPVpz_E,173 +git/test/fixtures/ls_tree_empty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +git/test/fixtures/reflog_HEAD,sha256=8J5zwsJRmdb8xdWyQoRUzJYJnDSaeo0rLa5uthBovyQ,114210 +git/test/fixtures/reflog_invalid_date,sha256=VlAYk2kGs3CySphJV0OmWwpWZK_uB9FxICTICZuKwSM,409 +git/test/fixtures/reflog_invalid_email,sha256=1OoNdoGKNcNKWVQAv5ZKSgVEt0zSkigvHOTs3MMhVW0,411 +git/test/fixtures/reflog_invalid_newsha,sha256=i-xph-C12dZT-dEKWS4VTDtX4AzQVUcCF3KXfMp9Gu0,404 +git/test/fixtures/reflog_invalid_oldsha,sha256=guzXH-wQOfz3yQJFMChzhuXcgQ6G6rGTSwlIdBVX8Wg,398 +git/test/fixtures/reflog_invalid_sep,sha256=0D9WHWpIGE2tQXD8utDcq-bbxdgVnWWCAMK_vwI3-zA,415 +git/test/fixtures/reflog_master,sha256=K1-VX1oQ3gM_23qTjVV-8yQOXeXuRtePgUXAE6D1TVo,31286 +git/test/fixtures/rev_list,sha256=pJPFZuJGwLzQ6m4P2d7VNaRLdMefGxxtztgU9fQfCCU,123 +git/test/fixtures/rev_list_bisect_all,sha256=r0gnyZwq-IVHxNss4qE6zMv29PEcLyE0t_fV4MKISHc,2172 +git/test/fixtures/rev_list_commit_diffs,sha256=n8qhU8FHEqr7Z8z8PvRGEODveuPbFIuaXB8UYGTqTPc,306 +git/test/fixtures/rev_list_commit_idabbrev,sha256=W_cHcxor5sFGeS8-nmIpWNim-wtFY7636Hwh04Sfve8,271 
+git/test/fixtures/rev_list_commit_stats,sha256=1bZgYDN3iqjdIiZtYUuPNZXcyJYlDiusy3dw5utnr3M,244 +git/test/fixtures/rev_list_count,sha256=wyBmlaA46bFntXaF6nx28phdDPwTZVW5kJr71pRrmb0,26855 +git/test/fixtures/rev_list_delta_a,sha256=ikrcoYkO311vbCS_xoeyKE6myYKlKP5by88KU4oG6qI,328 +git/test/fixtures/rev_list_delta_b,sha256=iiTGJRF2nzZrsHLXB1oOcZaoLvnSGAB3B9PLt5acmno,451 +git/test/fixtures/rev_list_single,sha256=YqAJowQ_ujS8kUnNfBlm8ibKY7ki5vu2nXc_vt-4nq0,293 +git/test/fixtures/rev_parse,sha256=y9iM5H6QPxDLEoGO9D4qSMBuDw4nz196c5VMflC1rak,8 +git/test/fixtures/show_empty_commit,sha256=xeKoNCOFUPZcSztV3olKSs6u14fVdHwjnkGYLsEcZn8,252 +git/test/fixtures/uncommon_branch_prefix_FETCH_HEAD,sha256=NO36DB4HWl4sOisR6EdFroTDakA-4XOx2kk4lFQIsiQ,603 +git/test/fixtures/uncommon_branch_prefix_stderr,sha256=4-rJlXvPu-1ByjZzsUUJXFruPRxan7C5ssNtM7qZbeo,324 +git/test/lib/__init__.py,sha256=k2xMRT9FC0m3yX_iMKaDcyuuZe0tGSr95ork3VOaeWk,414 +git/test/lib/__pycache__/__init__.cpython-37.pyc,, +git/test/lib/__pycache__/asserts.cpython-37.pyc,, +git/test/lib/__pycache__/helper.cpython-37.pyc,, +git/test/lib/asserts.py,sha256=_9sOUHopeO-3PZOkxMXfTWaTxxPaWwmpnAVaDxpcaWk,2273 +git/test/lib/helper.py,sha256=TI69pdx0xIMhfzOzBDB3BwqPvPsykp9bUXiyw2B0Xd8,13592 +git/test/performance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +git/test/performance/__pycache__/__init__.cpython-37.pyc,, +git/test/performance/__pycache__/lib.cpython-37.pyc,, +git/test/performance/__pycache__/test_commit.cpython-37.pyc,, +git/test/performance/__pycache__/test_odb.cpython-37.pyc,, +git/test/performance/__pycache__/test_streams.cpython-37.pyc,, +git/test/performance/lib.py,sha256=qSicSiyRI30rP3EFeVoevC_sBDgXDFtZKIFr_Ikz84g,2427 +git/test/performance/test_commit.py,sha256=ws8ORcvg3h0eXkI2G7a4OEl5QFG-9s2Agf0ut_8sUqU,3732 +git/test/performance/test_odb.py,sha256=knbDhq2sRagwyGHKQ7uNZLWN8bzYt_VF6bNucoON6dI,2651 +git/test/performance/test_streams.py,sha256=YriRvZ8i-yhMtQ5UdyGUt-X-fKhSddACwtT5e09bDyE,5816 +git/test/test_actor.py,sha256=1bYmrTwWAYT_Qj9l9chbvuI8nNtHY6yGlDRJDDEq9A0,1242 +git/test/test_base.py,sha256=k6I5nG7ZeBCYpXwi3HX_mvURFelgvQFys5pWVQR6kjw,5649 +git/test/test_blob.py,sha256=Bs4FWke9Sjzx06EJuG9hh1T5qBgJEEz4aBCcr3cW9L0,878 +git/test/test_commit.py,sha256=I9bHaid6bAR9vdEYyYLxP0Dfosn0vJ_ylCb_r-BYttI,15442 +git/test/test_config.py,sha256=ZbUjlEwmIAKlpbpiJm8lizNUaopxLfslzsHiHtiaJMY,16611 +git/test/test_db.py,sha256=e9UNddyQfoa-kzZo-XyrwVuYiq887NUkYrK8wZkTu9M,939 +git/test/test_diff.py,sha256=49pEtXt6KTm3fHT3x0C1N7eh-lU4FY1pGDCZ8-k9bpw,15797 +git/test/test_docs.py,sha256=XZZnXG7ya-liddMy39Ao6YtsRvYY3pXKMQXAW3gmulI,25340 +git/test/test_exc.py,sha256=0DBYNiYVfPVlFKYRzqsoZUJnf0lQiUDmdrRIIHWeSlE,5123 +git/test/test_fun.py,sha256=a91XgGk-YPwlgJEc-gy2tI_ilSq29XSQEywwc-kDnG0,10456 +git/test/test_git.py,sha256=Jxd8gd0NKCnWPP0q9XbRdaMs6ZBb8xHaNRULYaAuWRk,11164 +git/test/test_index.py,sha256=SoT5SRXnbsITU9zTkbCrUNUhnGqYer3T7HIs3O1C9pU,37348 +git/test/test_reflog.py,sha256=vfI-NQCtnGlJEUtYR0_k7Y1Hc4pZQ5F_T4T49hxSnNU,3505 +git/test/test_refs.py,sha256=2rNm9HdJZTWXx775JHG_R9Pd5X022IQ9C2CbP_9vDoE,23357 +git/test/test_remote.py,sha256=pdrahbBiS513mS4oBLME2-pAsg0aMCYH5OoYaB9fD04,27019 +git/test/test_repo.py,sha256=LkgJY_MC4F_2ZRgyDZ_zIQixuv3dcMuNkZP4Y9ChAlY,40101 +git/test/test_stats.py,sha256=qmF2lL1wW0tEd17E-tkjmpPFVXzjREf7KW5JMCTQ4Zg,971 +git/test/test_submodule.py,sha256=yyMisD-6UH0Im4sAKGgG1XTNMIBTbs5bRAz-3iZivOw,41981 +git/test/test_tree.py,sha256=nR5OAQZLhv7kISoL3RO8ppkXAbKFYz3XlPAxABU1b4o,4046 
+git/test/test_util.py,sha256=BWEFonEy5ZasCvNRKWqfqfnEQ3wVHVtMqnWkqfmfqAI,9308 +git/util.py,sha256=uJX0Q4FXyFYBhRQPWj6Cbe5KJCI7pvQzOn0wTSFtuKE,31606 diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/WHEEL b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/WHEEL new file mode 100644 index 0000000..3b5c403 --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/top_level.txt new file mode 100644 index 0000000..5664e30 --- /dev/null +++ b/venv/lib/python3.7/site-packages/GitPython-3.0.5.dist-info/top_level.txt @@ -0,0 +1 @@ +git diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/LICENSE.txt b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..91e18a6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/LICENSE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/METADATA b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/METADATA new file mode 100644 index 0000000..cfcac0d --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/METADATA @@ -0,0 +1,196 @@ +Metadata-Version: 2.1 +Name: PyNaCl +Version: 1.3.0 +Summary: Python binding to the Networking and Cryptography (NaCl) library +Home-page: https://github.com/pyca/pynacl/ +Author: The PyNaCl developers +Author-email: cryptography-dev@python.org +License: Apache License 2.0 +Platform: UNKNOWN +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Provides-Extra: docs +Provides-Extra: tests +Requires-Dist: six +Requires-Dist: cffi (>=1.4.1) +Provides-Extra: docs +Requires-Dist: sphinx (>=1.6.5); extra == 'docs' +Requires-Dist: sphinx-rtd-theme; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest (!=3.3.0,>=3.2.1); extra == 'tests' +Requires-Dist: hypothesis (>=3.27.0); extra == 'tests' + +=============================================== +PyNaCl: Python binding to the libsodium library +=============================================== + +.. image:: https://img.shields.io/pypi/v/pynacl.svg + :target: https://pypi.org/project/PyNaCl/ + :alt: Latest Version + +.. image:: https://travis-ci.org/pyca/pynacl.svg?branch=master + :target: https://travis-ci.org/pyca/pynacl + +.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/pynacl?branch=master + +PyNaCl is a Python binding to `libsodium`_, which is a fork of the +`Networking and Cryptography library`_. These libraries have a stated goal of +improving usability, security and speed. 
It supports Python 2.7 and 3.4+ as +well as PyPy 2.6+. + +.. _libsodium: https://github.com/jedisct1/libsodium +.. _Networking and Cryptography library: https://nacl.cr.yp.to/ + +Features +-------- + +* Digital signatures +* Secret-key encryption +* Public-key encryption +* Hashing and message authentication +* Password based key derivation and password hashing + +Installation +============ + +Binary wheel install +-------------------- + +PyNaCl ships as a binary wheel on OS X, Windows and Linux ``manylinux1`` [#many]_ , +so all dependencies are included. Make sure you have an up-to-date pip +and run: + +.. code-block:: console + + $ pip install pynacl + +Linux source build +------------------ + +PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled +with PyNaCl so to install you can run: + +.. code-block:: console + + $ pip install pynacl + +If you'd prefer to use the version of ``libsodium`` provided by your +distribution, you can disable the bundled copy during install by running: + +.. code-block:: console + + $ SODIUM_INSTALL=system pip install pynacl + +.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools + is generally discouraged, and is completely unsupported in PyNaCl's case. + +.. _libsodium: https://github.com/jedisct1/libsodium + +.. [#many] `manylinux1 wheels `_ + are built on a baseline linux environment based on Centos 5.11 + and should work on most x86 and x86_64 glibc based linux environments. + +Changelog +========= + +1.3.0 2018-09-26 +---------------- + +* Added support for Python 3.7. +* Update ``libsodium`` to 1.0.16. +* Run and test all code examples in PyNaCl docs through sphinx's + doctest builder. +* Add low-level bindings for chacha20-poly1305 AEAD constructions. +* Add low-level bindings for the chacha20-poly1305 secretstream constructions. +* Add low-level bindings for ed25519ph pre-hashed signing construction. +* Add low-level bindings for constant-time increment and addition + on fixed-precision big integers represented as little-endian + byte sequences. +* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API. +* Add low-level bindings for libsodium's crypto_kx... key exchange + construction. +* Set hypothesis deadline to None in tests/test_pwhash.py to avoid + incorrect test failures on slower processor architectures. GitHub + issue #370 + +1.2.1 - 2017-12-04 +------------------ + +* Update hypothesis minimum allowed version. +* Infrastructure: add proper configuration for readthedocs builder + runtime environment. + +1.2.0 - 2017-11-01 +------------------ + +* Update ``libsodium`` to 1.0.15. +* Infrastructure: add jenkins support for automatic build of + ``manylinux1`` binary wheels +* Added support for ``SealedBox`` construction. +* Added support for ``argon2i`` and ``argon2id`` password hashing constructs + and restructured high-level password hashing implementation to expose + the same interface for all hashers. +* Added support for 128 bit ``siphashx24`` variant of ``siphash24``. +* Added support for ``from_seed`` APIs for X25519 keypair generation. +* Dropped support for Python 3.3. + +1.1.2 - 2017-03-31 +------------------ + +* reorder link time library search path when using bundled + libsodium + +1.1.1 - 2017-03-15 +------------------ + +* Fixed a circular import bug in ``nacl.utils``. + +1.1.0 - 2017-03-14 +------------------ + +* Dropped support for Python 2.6. +* Added ``shared_key()`` method on ``Box``. 
+* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or + ``SecretBox`` and it will automatically generate a random nonce. +* Added support for ``siphash24``. +* Added support for ``blake2b``. +* Added support for ``scrypt``. +* Update ``libsodium`` to 1.0.11. +* Default to the bundled ``libsodium`` when compiling. +* All raised exceptions are defined mixing-in + ``nacl.exceptions.CryptoError`` + +1.0.1 - 2016-01-24 +------------------ + +* Fix an issue with absolute paths that prevented the creation of wheels. + +1.0 - 2016-01-23 +---------------- + +* PyNaCl has been ported to use the new APIs available in cffi 1.0+. + Due to this change we no longer support PyPy releases older than 2.6. +* Python 3.2 support has been dropped. +* Functions to convert between Ed25519 and Curve25519 keys have been added. + +0.3.0 - 2015-03-04 +------------------ + +* The low-level API (`nacl.c.*`) has been changed to match the + upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). + The order of arguments and return values has changed significantly. To + avoid silent failures, `nacl.c` has been removed, and replaced with + `nacl.bindings` (with the new argument ordering). If you have code which + calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review + the new docstrings and update your code/imports to match the new + conventions. + + diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/RECORD b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/RECORD new file mode 100644 index 0000000..19734d4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/RECORD @@ -0,0 +1,65 @@ +PyNaCl-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyNaCl-1.3.0.dist-info/LICENSE.txt,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694 +PyNaCl-1.3.0.dist-info/METADATA,sha256=pl3lEk7jyuFK7MNHloWA3m28tYU6xHI-S4le43id_x8,6632 +PyNaCl-1.3.0.dist-info/RECORD,, +PyNaCl-1.3.0.dist-info/WHEEL,sha256=C4bGFJmj_qggBmsPGIGQ0FKvkClHeS8w8oo07-tVF_E,108 +PyNaCl-1.3.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13 +nacl/__init__.py,sha256=PS9BuXZoCwSvrDpB8HXldTHnA6lb4y00IRi3uqdW5_E,1170 +nacl/__pycache__/__init__.cpython-37.pyc,, +nacl/__pycache__/encoding.cpython-37.pyc,, +nacl/__pycache__/exceptions.cpython-37.pyc,, +nacl/__pycache__/hash.cpython-37.pyc,, +nacl/__pycache__/hashlib.cpython-37.pyc,, +nacl/__pycache__/public.cpython-37.pyc,, +nacl/__pycache__/secret.cpython-37.pyc,, +nacl/__pycache__/signing.cpython-37.pyc,, +nacl/__pycache__/utils.cpython-37.pyc,, +nacl/_sodium.abi3.so,sha256=sv2gxhpN8C_iHXhPays7QUezt3axiim7W9zm6ddQzH8,2486075 +nacl/bindings/__init__.py,sha256=dNH1zFjW87qszsld5oy6xMf2S1w2v_qshQwYHp66pz4,14943 +nacl/bindings/__pycache__/__init__.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_aead.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_box.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_generichash.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_hash.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_kx.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_pwhash.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_scalarmult.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_secretbox.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_secretstream.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_shorthash.cpython-37.pyc,, +nacl/bindings/__pycache__/crypto_sign.cpython-37.pyc,, +nacl/bindings/__pycache__/randombytes.cpython-37.pyc,, 
+nacl/bindings/__pycache__/sodium_core.cpython-37.pyc,, +nacl/bindings/__pycache__/utils.cpython-37.pyc,, +nacl/bindings/crypto_aead.py,sha256=DE5zdi09GeHZxvmrhHtxVuTqF61y1cs8trTGh_6uP8Q,17335 +nacl/bindings/crypto_box.py,sha256=hbHJetr9id5OvkbJwJoeqRQAhqSIGwWC2aXRAF5oPE4,9708 +nacl/bindings/crypto_generichash.py,sha256=-e4b4DaopLBQHhEjLSjEoumy5fOs4QdTb-hou1S34C4,8010 +nacl/bindings/crypto_hash.py,sha256=7Xp4mpXr4cpn-hAOU66KlYVUCVHP6deT0v_eW4UZZXo,2243 +nacl/bindings/crypto_kx.py,sha256=2Gjxu5c7IKAwW2MOJa9zEn1EgpIVQ0tbZQs33REZb38,6937 +nacl/bindings/crypto_pwhash.py,sha256=lWhEFKmXzFhKnzzxtWDwozs0CseZDkGgTJaI4YQ5rak,16898 +nacl/bindings/crypto_scalarmult.py,sha256=VA2khmlUrnR24KK0CAdDw2dQ0jiYkku9-_NA-f1p21c,1803 +nacl/bindings/crypto_secretbox.py,sha256=luvzB3lwBwXxKm63e9nA2neGtOXeeG8R9SyWEckIqdI,2864 +nacl/bindings/crypto_secretstream.py,sha256=gdKinW10jP3CZ51hanE40s6e39rz8iuajdXTSBSKVcM,10474 +nacl/bindings/crypto_shorthash.py,sha256=eVUE8byB1RjI0AoHib5BdZSSLtSqtdIcHgPCPWf2OZM,2189 +nacl/bindings/crypto_sign.py,sha256=uA0RdHM4vsBDNhph2f7fcuI_9K8vvW-4hNHjajTIVU0,9641 +nacl/bindings/randombytes.py,sha256=eThts6s-9xBXOl3GNzT57fV1dZUhzPjjAmAVIUHfcrc,988 +nacl/bindings/sodium_core.py,sha256=52z0K7y6Ge6IlXcysWDVN7UdYcTOij6v0Cb0OLo8_Qc,1079 +nacl/bindings/utils.py,sha256=jOKsDbsjxN9v_HI8DOib72chyU3byqbynXxbiV909-g,4420 +nacl/encoding.py,sha256=tOiyIQVVpGU6A4Lzr0tMuqomhc_Aj0V_c1t56a-ZtPw,1928 +nacl/exceptions.py,sha256=SG0BNtXnzmppI9in6xMTSizh1ryfgUIvIVMQv_A0bs8,1858 +nacl/hash.py,sha256=4DKlmqpWOZJLhzTPk7_JSGXQ32lJULsS3AzJCGsibus,5928 +nacl/hashlib.py,sha256=gMxOu-lIlKYr3ywSCjsJRBksYgpU2dvXgaAEfQz7PEg,3909 +nacl/public.py,sha256=-nwQof5ov-wSSdvvoXh-FavTtjfpRnYykZkatNKyLd0,13442 +nacl/pwhash/__init__.py,sha256=CN0mP6yteSYp3ui-DyWR1vjULNrXVN_gQ72CmTPoao0,2695 +nacl/pwhash/__pycache__/__init__.cpython-37.pyc,, +nacl/pwhash/__pycache__/_argon2.cpython-37.pyc,, +nacl/pwhash/__pycache__/argon2i.cpython-37.pyc,, +nacl/pwhash/__pycache__/argon2id.cpython-37.pyc,, +nacl/pwhash/__pycache__/scrypt.cpython-37.pyc,, +nacl/pwhash/_argon2.py,sha256=Eu3-juLws3_v1gNy5aeSVPEwuRVFdGOrfeF0wPH9VHA,1878 +nacl/pwhash/argon2i.py,sha256=EpheK0UHJvZYca_EMhhOcX5GXaOr0xCjFDTIgmSCSDo,4598 +nacl/pwhash/argon2id.py,sha256=IqNm5RQNEd1Z9F-bEWT-_Y9noU26QoTR5YdWONg1uuI,4610 +nacl/pwhash/scrypt.py,sha256=F9iUKbzZUMG2ZXuuk70p4KXI_nItue3VA39zmwOESE8,6025 +nacl/secret.py,sha256=jf4WuUjnnXTekZ2elGgQozZl6zGzxGY_0Nw0fwehUlg,5430 +nacl/signing.py,sha256=ZwA1l31ZgOIw_sAjiUPkzEo07uYYi8SE7Ni0G_R8ksQ,7302 +nacl/utils.py,sha256=hhmIriBM7Bwyh3beTrqVqDDucai5gXlSliAMVrxIHPI,1691 diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/WHEEL b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/WHEEL new file mode 100644 index 0000000..c7b5e65 --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp34-abi3-manylinux1_x86_64 + diff --git a/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..f52507f --- /dev/null +++ b/venv/lib/python3.7/site-packages/PyNaCl-1.3.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_sodium +nacl diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/PKG-INFO b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/PKG-INFO new file mode 100644 index 0000000..f0423cd --- /dev/null +++ 
b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/PKG-INFO @@ -0,0 +1,181 @@ +Metadata-Version: 2.1 +Name: SQLAlchemy +Version: 1.3.11 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Description: SQLAlchemy + ========== + + The Python SQL Toolkit and Object Relational Mapper + + Introduction + ------------- + + SQLAlchemy is the Python SQL toolkit and Object Relational Mapper + that gives application developers the full power and + flexibility of SQL. SQLAlchemy provides a full suite + of well known enterprise-level persistence patterns, + designed for efficient and high-performing database + access, adapted into a simple and Pythonic domain + language. + + Major SQLAlchemy features include: + + * An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. + * A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. + * A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. + * A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. + * All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. + * Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + + SQLAlchemy's philosophy: + + * SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. + * An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. 
+ * The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. + * With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. + * Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. + * Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. + * Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + + Documentation + ------------- + + Latest documentation is at: + + http://www.sqlalchemy.org/docs/ + + Installation / Requirements + --------------------------- + + Full documentation for installation is at + `Installation `_. + + Getting Help / Development / Bug reporting + ------------------------------------------ + + Please refer to the `SQLAlchemy Community Guide `_. + + Code of Conduct + --------------- + + Above all, SQLAlchemy places great emphasis on polite, thoughtful, and + constructive communication between users and developers. + Please see our current Code of Conduct at + `Code of Conduct `_. + + License + ------- + + SQLAlchemy is distributed under the `MIT license + `_. 
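The description above credits the Core with a full SQL expression language and with sending every value as a bound parameter rather than rendering literals into the statement. As a point of reference only, a minimal self-contained sketch of that pattern against the vendored SQLAlchemy 1.3 API looks roughly like the following; the in-memory SQLite URL and the ``users`` table are illustrative assumptions, not code from this repository:

    from sqlalchemy import (Column, Integer, MetaData, String, Table,
                            create_engine, select)

    engine = create_engine("sqlite://")   # assumption: throwaway in-memory database
    metadata = MetaData()
    users = Table(
        "users", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )
    metadata.create_all(engine)           # Core-only DDL; no ORM session involved

    with engine.connect() as conn:
        # the dicts travel as bound parameters; no literal values are rendered into the SQL text
        conn.execute(users.insert(), [{"name": "alice"}, {"name": "bob"}])
        rows = conn.execute(select([users.c.name]).where(users.c.id == 1)).fetchall()
        print(rows)                       # [('alice',)]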
+ + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Provides-Extra: mssql +Provides-Extra: postgresql +Provides-Extra: postgresql_psycopg2binary +Provides-Extra: postgresql_psycopg2cffi +Provides-Extra: mssql_pyodbc +Provides-Extra: pymysql +Provides-Extra: oracle +Provides-Extra: mssql_pymssql +Provides-Extra: mysql +Provides-Extra: postgresql_pg8000 diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/SOURCES.txt b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/SOURCES.txt new file mode 100644 index 0000000..7e5c116 --- /dev/null +++ b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/SOURCES.txt @@ -0,0 +1,847 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.dialects.rst +README.rst +README.unittests.rst +setup.cfg +setup.py +tox.ini +doc/contents.html +doc/copyright.html +doc/errors.html +doc/genindex.html +doc/glossary.html +doc/index.html +doc/intro.html +doc/notfound.html +doc/search.html +doc/searchindex.js +doc/_images/sqla_arch_small.png +doc/_images/sqla_engine_arch.png +doc/_modules/index.html +doc/_modules/examples/adjacency_list/adjacency_list.html +doc/_modules/examples/association/basic_association.html +doc/_modules/examples/association/dict_of_sets_with_default.html +doc/_modules/examples/association/proxied_association.html +doc/_modules/examples/custom_attributes/active_column_defaults.html +doc/_modules/examples/custom_attributes/custom_management.html +doc/_modules/examples/custom_attributes/listen_for_events.html +doc/_modules/examples/dogpile_caching/advanced.html +doc/_modules/examples/dogpile_caching/caching_query.html +doc/_modules/examples/dogpile_caching/environment.html +doc/_modules/examples/dogpile_caching/fixture_data.html +doc/_modules/examples/dogpile_caching/helloworld.html +doc/_modules/examples/dogpile_caching/local_session_caching.html +doc/_modules/examples/dogpile_caching/model.html +doc/_modules/examples/dogpile_caching/relationship_caching.html +doc/_modules/examples/dynamic_dict/dynamic_dict.html +doc/_modules/examples/elementtree/adjacency_list.html +doc/_modules/examples/elementtree/optimized_al.html +doc/_modules/examples/elementtree/pickle_type.html +doc/_modules/examples/generic_associations/discriminator_on_association.html +doc/_modules/examples/generic_associations/generic_fk.html +doc/_modules/examples/generic_associations/table_per_association.html +doc/_modules/examples/generic_associations/table_per_related.html +doc/_modules/examples/graphs/directed_graph.html +doc/_modules/examples/inheritance/concrete.html +doc/_modules/examples/inheritance/joined.html +doc/_modules/examples/inheritance/single.html +doc/_modules/examples/join_conditions/cast.html 
+doc/_modules/examples/join_conditions/threeway.html +doc/_modules/examples/large_collection/large_collection.html +doc/_modules/examples/materialized_paths/materialized_paths.html +doc/_modules/examples/nested_sets/nested_sets.html +doc/_modules/examples/performance/__main__.html +doc/_modules/examples/performance/bulk_inserts.html +doc/_modules/examples/performance/bulk_updates.html +doc/_modules/examples/performance/large_resultsets.html +doc/_modules/examples/performance/short_selects.html +doc/_modules/examples/performance/single_inserts.html +doc/_modules/examples/postgis/postgis.html +doc/_modules/examples/sharding/attribute_shard.html +doc/_modules/examples/space_invaders/space_invaders.html +doc/_modules/examples/versioned_history/history_meta.html +doc/_modules/examples/versioned_history/test_versioning.html +doc/_modules/examples/versioned_rows/versioned_map.html +doc/_modules/examples/versioned_rows/versioned_rows.html +doc/_modules/examples/versioned_rows/versioned_rows_w_versionid.html +doc/_modules/examples/versioned_rows/versioned_update_old_row.html +doc/_modules/examples/vertical/dictlike-polymorphic.html +doc/_modules/examples/vertical/dictlike.html +doc/_static/basic.css +doc/_static/changelog.css +doc/_static/detectmobile.js +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/documentation_options.js +doc/_static/dragons.png +doc/_static/file.png +doc/_static/init.js +doc/_static/jquery-3.4.1.js +doc/_static/jquery.js +doc/_static/language_data.js +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sphinx_paramlinks.css +doc/_static/underscore-1.3.1.js +doc/_static/underscore.js +doc/build/Makefile +doc/build/conf.py +doc/build/contents.rst +doc/build/copyright.rst +doc/build/corrections.py +doc/build/errors.rst +doc/build/glossary.rst +doc/build/index.rst +doc/build/intro.rst +doc/build/requirements.txt +doc/build/sqla_arch_small.png +doc/build/changelog/changelog_01.rst +doc/build/changelog/changelog_02.rst +doc/build/changelog/changelog_03.rst +doc/build/changelog/changelog_04.rst +doc/build/changelog/changelog_05.rst +doc/build/changelog/changelog_06.rst +doc/build/changelog/changelog_07.rst +doc/build/changelog/changelog_08.rst +doc/build/changelog/changelog_09.rst +doc/build/changelog/changelog_10.rst +doc/build/changelog/changelog_11.rst +doc/build/changelog/changelog_12.rst +doc/build/changelog/changelog_13.rst +doc/build/changelog/index.rst +doc/build/changelog/migration_04.rst +doc/build/changelog/migration_05.rst +doc/build/changelog/migration_06.rst +doc/build/changelog/migration_07.rst +doc/build/changelog/migration_08.rst +doc/build/changelog/migration_09.rst +doc/build/changelog/migration_10.rst +doc/build/changelog/migration_11.rst +doc/build/changelog/migration_12.rst +doc/build/changelog/migration_13.rst +doc/build/changelog/unreleased_10/4065.rst +doc/build/changelog/unreleased_10/README.txt +doc/build/changelog/unreleased_11/README.txt +doc/build/changelog/unreleased_12/README.txt +doc/build/changelog/unreleased_13/README.txt +doc/build/core/api_basics.rst +doc/build/core/compiler.rst +doc/build/core/connections.rst +doc/build/core/constraints.rst +doc/build/core/custom_types.rst +doc/build/core/ddl.rst +doc/build/core/defaults.rst +doc/build/core/dml.rst +doc/build/core/engines.rst +doc/build/core/engines_connections.rst +doc/build/core/event.rst +doc/build/core/events.rst +doc/build/core/exceptions.rst +doc/build/core/expression_api.rst +doc/build/core/functions.rst 
+doc/build/core/index.rst +doc/build/core/inspection.rst +doc/build/core/interfaces.rst +doc/build/core/internals.rst +doc/build/core/metadata.rst +doc/build/core/pooling.rst +doc/build/core/reflection.rst +doc/build/core/schema.rst +doc/build/core/selectable.rst +doc/build/core/serializer.rst +doc/build/core/sqla_engine_arch.png +doc/build/core/sqlelement.rst +doc/build/core/tutorial.rst +doc/build/core/type_api.rst +doc/build/core/type_basics.rst +doc/build/core/types.rst +doc/build/core/visitors.rst +doc/build/dialects/firebird.rst +doc/build/dialects/index.rst +doc/build/dialects/mssql.rst +doc/build/dialects/mysql.rst +doc/build/dialects/oracle.rst +doc/build/dialects/postgresql.rst +doc/build/dialects/sqlite.rst +doc/build/dialects/sybase.rst +doc/build/faq/connections.rst +doc/build/faq/index.rst +doc/build/faq/metadata_schema.rst +doc/build/faq/ormconfiguration.rst +doc/build/faq/performance.rst +doc/build/faq/sessions.rst +doc/build/faq/sqlexpressions.rst +doc/build/orm/backref.rst +doc/build/orm/basic_relationships.rst +doc/build/orm/cascades.rst +doc/build/orm/classical.rst +doc/build/orm/collections.rst +doc/build/orm/composites.rst +doc/build/orm/constructors.rst +doc/build/orm/contextual.rst +doc/build/orm/deprecated.rst +doc/build/orm/events.rst +doc/build/orm/examples.rst +doc/build/orm/exceptions.rst +doc/build/orm/extending.rst +doc/build/orm/index.rst +doc/build/orm/inheritance.rst +doc/build/orm/inheritance_loading.rst +doc/build/orm/internals.rst +doc/build/orm/join_conditions.rst +doc/build/orm/loading.rst +doc/build/orm/loading_columns.rst +doc/build/orm/loading_objects.rst +doc/build/orm/loading_relationships.rst +doc/build/orm/mapped_attributes.rst +doc/build/orm/mapped_sql_expr.rst +doc/build/orm/mapper_config.rst +doc/build/orm/mapping_api.rst +doc/build/orm/mapping_columns.rst +doc/build/orm/mapping_styles.rst +doc/build/orm/nonstandard_mappings.rst +doc/build/orm/persistence_techniques.rst +doc/build/orm/query.rst +doc/build/orm/relationship_api.rst +doc/build/orm/relationship_persistence.rst +doc/build/orm/relationships.rst +doc/build/orm/scalar_mapping.rst +doc/build/orm/self_referential.rst +doc/build/orm/session.rst +doc/build/orm/session_api.rst +doc/build/orm/session_basics.rst +doc/build/orm/session_events.rst +doc/build/orm/session_state_management.rst +doc/build/orm/session_transaction.rst +doc/build/orm/tutorial.rst +doc/build/orm/versioning.rst +doc/build/orm/extensions/associationproxy.rst +doc/build/orm/extensions/automap.rst +doc/build/orm/extensions/baked.rst +doc/build/orm/extensions/horizontal_shard.rst +doc/build/orm/extensions/hybrid.rst +doc/build/orm/extensions/index.rst +doc/build/orm/extensions/indexable.rst +doc/build/orm/extensions/instrumentation.rst +doc/build/orm/extensions/mutable.rst +doc/build/orm/extensions/orderinglist.rst +doc/build/orm/extensions/declarative/api.rst +doc/build/orm/extensions/declarative/basic_use.rst +doc/build/orm/extensions/declarative/index.rst +doc/build/orm/extensions/declarative/inheritance.rst +doc/build/orm/extensions/declarative/mixins.rst +doc/build/orm/extensions/declarative/relationships.rst +doc/build/orm/extensions/declarative/table_config.rst +doc/build/texinputs/Makefile +doc/build/texinputs/sphinx.sty +doc/changelog/changelog_01.html +doc/changelog/changelog_02.html +doc/changelog/changelog_03.html +doc/changelog/changelog_04.html +doc/changelog/changelog_05.html +doc/changelog/changelog_06.html +doc/changelog/changelog_07.html +doc/changelog/changelog_08.html +doc/changelog/changelog_09.html 
+doc/changelog/changelog_10.html +doc/changelog/changelog_11.html +doc/changelog/changelog_12.html +doc/changelog/changelog_13.html +doc/changelog/index.html +doc/changelog/migration_04.html +doc/changelog/migration_05.html +doc/changelog/migration_06.html +doc/changelog/migration_07.html +doc/changelog/migration_08.html +doc/changelog/migration_09.html +doc/changelog/migration_10.html +doc/changelog/migration_11.html +doc/changelog/migration_12.html +doc/changelog/migration_13.html +doc/core/api_basics.html +doc/core/compiler.html +doc/core/connections.html +doc/core/constraints.html +doc/core/custom_types.html +doc/core/ddl.html +doc/core/defaults.html +doc/core/dml.html +doc/core/engines.html +doc/core/engines_connections.html +doc/core/event.html +doc/core/events.html +doc/core/exceptions.html +doc/core/expression_api.html +doc/core/functions.html +doc/core/index.html +doc/core/inspection.html +doc/core/interfaces.html +doc/core/internals.html +doc/core/metadata.html +doc/core/pooling.html +doc/core/reflection.html +doc/core/schema.html +doc/core/selectable.html +doc/core/serializer.html +doc/core/sqlelement.html +doc/core/tutorial.html +doc/core/type_api.html +doc/core/type_basics.html +doc/core/types.html +doc/core/visitors.html +doc/dialects/firebird.html +doc/dialects/index.html +doc/dialects/mssql.html +doc/dialects/mysql.html +doc/dialects/oracle.html +doc/dialects/postgresql.html +doc/dialects/sqlite.html +doc/dialects/sybase.html +doc/faq/connections.html +doc/faq/index.html +doc/faq/metadata_schema.html +doc/faq/ormconfiguration.html +doc/faq/performance.html +doc/faq/sessions.html +doc/faq/sqlexpressions.html +doc/orm/backref.html +doc/orm/basic_relationships.html +doc/orm/cascades.html +doc/orm/classical.html +doc/orm/collections.html +doc/orm/composites.html +doc/orm/constructors.html +doc/orm/contextual.html +doc/orm/deprecated.html +doc/orm/events.html +doc/orm/examples.html +doc/orm/exceptions.html +doc/orm/extending.html +doc/orm/index.html +doc/orm/inheritance.html +doc/orm/inheritance_loading.html +doc/orm/internals.html +doc/orm/join_conditions.html +doc/orm/loading.html +doc/orm/loading_columns.html +doc/orm/loading_objects.html +doc/orm/loading_relationships.html +doc/orm/mapped_attributes.html +doc/orm/mapped_sql_expr.html +doc/orm/mapper_config.html +doc/orm/mapping_api.html +doc/orm/mapping_columns.html +doc/orm/mapping_styles.html +doc/orm/nonstandard_mappings.html +doc/orm/persistence_techniques.html +doc/orm/query.html +doc/orm/relationship_api.html +doc/orm/relationship_persistence.html +doc/orm/relationships.html +doc/orm/scalar_mapping.html +doc/orm/self_referential.html +doc/orm/session.html +doc/orm/session_api.html +doc/orm/session_basics.html +doc/orm/session_events.html +doc/orm/session_state_management.html +doc/orm/session_transaction.html +doc/orm/tutorial.html +doc/orm/versioning.html +doc/orm/extensions/associationproxy.html +doc/orm/extensions/automap.html +doc/orm/extensions/baked.html +doc/orm/extensions/horizontal_shard.html +doc/orm/extensions/hybrid.html +doc/orm/extensions/index.html +doc/orm/extensions/indexable.html +doc/orm/extensions/instrumentation.html +doc/orm/extensions/mutable.html +doc/orm/extensions/orderinglist.html +doc/orm/extensions/declarative/api.html +doc/orm/extensions/declarative/basic_use.html +doc/orm/extensions/declarative/index.html +doc/orm/extensions/declarative/inheritance.html +doc/orm/extensions/declarative/mixins.html +doc/orm/extensions/declarative/relationships.html 
+doc/orm/extensions/declarative/table_config.html +examples/__init__.py +examples/adjacency_list/__init__.py +examples/adjacency_list/adjacency_list.py +examples/association/__init__.py +examples/association/basic_association.py +examples/association/dict_of_sets_with_default.py +examples/association/proxied_association.py +examples/custom_attributes/__init__.py +examples/custom_attributes/active_column_defaults.py +examples/custom_attributes/custom_management.py +examples/custom_attributes/listen_for_events.py +examples/dogpile_caching/__init__.py +examples/dogpile_caching/advanced.py +examples/dogpile_caching/caching_query.py +examples/dogpile_caching/environment.py +examples/dogpile_caching/fixture_data.py +examples/dogpile_caching/helloworld.py +examples/dogpile_caching/local_session_caching.py +examples/dogpile_caching/model.py +examples/dogpile_caching/relationship_caching.py +examples/dynamic_dict/__init__.py +examples/dynamic_dict/dynamic_dict.py +examples/elementtree/__init__.py +examples/elementtree/adjacency_list.py +examples/elementtree/optimized_al.py +examples/elementtree/pickle_type.py +examples/elementtree/test.xml +examples/elementtree/test2.xml +examples/elementtree/test3.xml +examples/generic_associations/__init__.py +examples/generic_associations/discriminator_on_association.py +examples/generic_associations/generic_fk.py +examples/generic_associations/table_per_association.py +examples/generic_associations/table_per_related.py +examples/graphs/__init__.py +examples/graphs/directed_graph.py +examples/inheritance/__init__.py +examples/inheritance/concrete.py +examples/inheritance/joined.py +examples/inheritance/single.py +examples/join_conditions/__init__.py +examples/join_conditions/cast.py +examples/join_conditions/threeway.py +examples/large_collection/__init__.py +examples/large_collection/large_collection.py +examples/materialized_paths/__init__.py +examples/materialized_paths/materialized_paths.py +examples/nested_sets/__init__.py +examples/nested_sets/nested_sets.py +examples/performance/__init__.py +examples/performance/__main__.py +examples/performance/bulk_inserts.py +examples/performance/bulk_updates.py +examples/performance/large_resultsets.py +examples/performance/short_selects.py +examples/performance/single_inserts.py +examples/postgis/__init__.py +examples/postgis/postgis.py +examples/sharding/__init__.py +examples/sharding/attribute_shard.py +examples/space_invaders/__init__.py +examples/space_invaders/space_invaders.py +examples/versioned_history/__init__.py +examples/versioned_history/history_meta.py +examples/versioned_history/test_versioning.py +examples/versioned_rows/__init__.py +examples/versioned_rows/versioned_map.py +examples/versioned_rows/versioned_rows.py +examples/versioned_rows/versioned_rows_w_versionid.py +examples/versioned_rows/versioned_update_old_row.py +examples/vertical/__init__.py +examples/vertical/dictlike-polymorphic.py +examples/vertical/dictlike.py +lib/SQLAlchemy.egg-info/PKG-INFO +lib/SQLAlchemy.egg-info/SOURCES.txt +lib/SQLAlchemy.egg-info/dependency_links.txt +lib/SQLAlchemy.egg-info/requires.txt +lib/SQLAlchemy.egg-info/top_level.txt +lib/sqlalchemy/__init__.py +lib/sqlalchemy/events.py +lib/sqlalchemy/exc.py +lib/sqlalchemy/inspection.py +lib/sqlalchemy/interfaces.py +lib/sqlalchemy/log.py +lib/sqlalchemy/processors.py +lib/sqlalchemy/schema.py +lib/sqlalchemy/types.py +lib/sqlalchemy/cextension/processors.c +lib/sqlalchemy/cextension/resultproxy.c +lib/sqlalchemy/cextension/utils.c 
+lib/sqlalchemy/connectors/__init__.py +lib/sqlalchemy/connectors/mxodbc.py +lib/sqlalchemy/connectors/pyodbc.py +lib/sqlalchemy/connectors/zxJDBC.py +lib/sqlalchemy/databases/__init__.py +lib/sqlalchemy/dialects/__init__.py +lib/sqlalchemy/dialects/type_migration_guidelines.txt +lib/sqlalchemy/dialects/firebird/__init__.py +lib/sqlalchemy/dialects/firebird/base.py +lib/sqlalchemy/dialects/firebird/fdb.py +lib/sqlalchemy/dialects/firebird/kinterbasdb.py +lib/sqlalchemy/dialects/mssql/__init__.py +lib/sqlalchemy/dialects/mssql/adodbapi.py +lib/sqlalchemy/dialects/mssql/base.py +lib/sqlalchemy/dialects/mssql/information_schema.py +lib/sqlalchemy/dialects/mssql/mxodbc.py +lib/sqlalchemy/dialects/mssql/pymssql.py +lib/sqlalchemy/dialects/mssql/pyodbc.py +lib/sqlalchemy/dialects/mssql/zxjdbc.py +lib/sqlalchemy/dialects/mysql/__init__.py +lib/sqlalchemy/dialects/mysql/base.py +lib/sqlalchemy/dialects/mysql/cymysql.py +lib/sqlalchemy/dialects/mysql/dml.py +lib/sqlalchemy/dialects/mysql/enumerated.py +lib/sqlalchemy/dialects/mysql/gaerdbms.py +lib/sqlalchemy/dialects/mysql/json.py +lib/sqlalchemy/dialects/mysql/mysqlconnector.py +lib/sqlalchemy/dialects/mysql/mysqldb.py +lib/sqlalchemy/dialects/mysql/oursql.py +lib/sqlalchemy/dialects/mysql/pymysql.py +lib/sqlalchemy/dialects/mysql/pyodbc.py +lib/sqlalchemy/dialects/mysql/reflection.py +lib/sqlalchemy/dialects/mysql/types.py +lib/sqlalchemy/dialects/mysql/zxjdbc.py +lib/sqlalchemy/dialects/oracle/__init__.py +lib/sqlalchemy/dialects/oracle/base.py +lib/sqlalchemy/dialects/oracle/cx_oracle.py +lib/sqlalchemy/dialects/oracle/zxjdbc.py +lib/sqlalchemy/dialects/postgresql/__init__.py +lib/sqlalchemy/dialects/postgresql/array.py +lib/sqlalchemy/dialects/postgresql/base.py +lib/sqlalchemy/dialects/postgresql/dml.py +lib/sqlalchemy/dialects/postgresql/ext.py +lib/sqlalchemy/dialects/postgresql/hstore.py +lib/sqlalchemy/dialects/postgresql/json.py +lib/sqlalchemy/dialects/postgresql/pg8000.py +lib/sqlalchemy/dialects/postgresql/psycopg2.py +lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +lib/sqlalchemy/dialects/postgresql/pygresql.py +lib/sqlalchemy/dialects/postgresql/pypostgresql.py +lib/sqlalchemy/dialects/postgresql/ranges.py +lib/sqlalchemy/dialects/postgresql/zxjdbc.py +lib/sqlalchemy/dialects/sqlite/__init__.py +lib/sqlalchemy/dialects/sqlite/base.py +lib/sqlalchemy/dialects/sqlite/json.py +lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +lib/sqlalchemy/dialects/sqlite/pysqlite.py +lib/sqlalchemy/dialects/sybase/__init__.py +lib/sqlalchemy/dialects/sybase/base.py +lib/sqlalchemy/dialects/sybase/mxodbc.py +lib/sqlalchemy/dialects/sybase/pyodbc.py +lib/sqlalchemy/dialects/sybase/pysybase.py +lib/sqlalchemy/engine/__init__.py +lib/sqlalchemy/engine/base.py +lib/sqlalchemy/engine/default.py +lib/sqlalchemy/engine/interfaces.py +lib/sqlalchemy/engine/reflection.py +lib/sqlalchemy/engine/result.py +lib/sqlalchemy/engine/strategies.py +lib/sqlalchemy/engine/threadlocal.py +lib/sqlalchemy/engine/url.py +lib/sqlalchemy/engine/util.py +lib/sqlalchemy/event/__init__.py +lib/sqlalchemy/event/api.py +lib/sqlalchemy/event/attr.py +lib/sqlalchemy/event/base.py +lib/sqlalchemy/event/legacy.py +lib/sqlalchemy/event/registry.py +lib/sqlalchemy/ext/__init__.py +lib/sqlalchemy/ext/associationproxy.py +lib/sqlalchemy/ext/automap.py +lib/sqlalchemy/ext/baked.py +lib/sqlalchemy/ext/compiler.py +lib/sqlalchemy/ext/horizontal_shard.py +lib/sqlalchemy/ext/hybrid.py +lib/sqlalchemy/ext/indexable.py +lib/sqlalchemy/ext/instrumentation.py +lib/sqlalchemy/ext/mutable.py 
+lib/sqlalchemy/ext/orderinglist.py +lib/sqlalchemy/ext/serializer.py +lib/sqlalchemy/ext/declarative/__init__.py +lib/sqlalchemy/ext/declarative/api.py +lib/sqlalchemy/ext/declarative/base.py +lib/sqlalchemy/ext/declarative/clsregistry.py +lib/sqlalchemy/orm/__init__.py +lib/sqlalchemy/orm/attributes.py +lib/sqlalchemy/orm/base.py +lib/sqlalchemy/orm/collections.py +lib/sqlalchemy/orm/dependency.py +lib/sqlalchemy/orm/deprecated_interfaces.py +lib/sqlalchemy/orm/descriptor_props.py +lib/sqlalchemy/orm/dynamic.py +lib/sqlalchemy/orm/evaluator.py +lib/sqlalchemy/orm/events.py +lib/sqlalchemy/orm/exc.py +lib/sqlalchemy/orm/identity.py +lib/sqlalchemy/orm/instrumentation.py +lib/sqlalchemy/orm/interfaces.py +lib/sqlalchemy/orm/loading.py +lib/sqlalchemy/orm/mapper.py +lib/sqlalchemy/orm/path_registry.py +lib/sqlalchemy/orm/persistence.py +lib/sqlalchemy/orm/properties.py +lib/sqlalchemy/orm/query.py +lib/sqlalchemy/orm/relationships.py +lib/sqlalchemy/orm/scoping.py +lib/sqlalchemy/orm/session.py +lib/sqlalchemy/orm/state.py +lib/sqlalchemy/orm/strategies.py +lib/sqlalchemy/orm/strategy_options.py +lib/sqlalchemy/orm/sync.py +lib/sqlalchemy/orm/unitofwork.py +lib/sqlalchemy/orm/util.py +lib/sqlalchemy/pool/__init__.py +lib/sqlalchemy/pool/base.py +lib/sqlalchemy/pool/dbapi_proxy.py +lib/sqlalchemy/pool/impl.py +lib/sqlalchemy/sql/__init__.py +lib/sqlalchemy/sql/annotation.py +lib/sqlalchemy/sql/base.py +lib/sqlalchemy/sql/compiler.py +lib/sqlalchemy/sql/crud.py +lib/sqlalchemy/sql/ddl.py +lib/sqlalchemy/sql/default_comparator.py +lib/sqlalchemy/sql/dml.py +lib/sqlalchemy/sql/elements.py +lib/sqlalchemy/sql/expression.py +lib/sqlalchemy/sql/functions.py +lib/sqlalchemy/sql/naming.py +lib/sqlalchemy/sql/operators.py +lib/sqlalchemy/sql/schema.py +lib/sqlalchemy/sql/selectable.py +lib/sqlalchemy/sql/sqltypes.py +lib/sqlalchemy/sql/type_api.py +lib/sqlalchemy/sql/util.py +lib/sqlalchemy/sql/visitors.py +lib/sqlalchemy/testing/__init__.py +lib/sqlalchemy/testing/assertions.py +lib/sqlalchemy/testing/assertsql.py +lib/sqlalchemy/testing/config.py +lib/sqlalchemy/testing/engines.py +lib/sqlalchemy/testing/entities.py +lib/sqlalchemy/testing/exclusions.py +lib/sqlalchemy/testing/fixtures.py +lib/sqlalchemy/testing/mock.py +lib/sqlalchemy/testing/pickleable.py +lib/sqlalchemy/testing/profiling.py +lib/sqlalchemy/testing/provision.py +lib/sqlalchemy/testing/replay_fixture.py +lib/sqlalchemy/testing/requirements.py +lib/sqlalchemy/testing/schema.py +lib/sqlalchemy/testing/util.py +lib/sqlalchemy/testing/warnings.py +lib/sqlalchemy/testing/plugin/__init__.py +lib/sqlalchemy/testing/plugin/bootstrap.py +lib/sqlalchemy/testing/plugin/plugin_base.py +lib/sqlalchemy/testing/plugin/pytestplugin.py +lib/sqlalchemy/testing/suite/__init__.py +lib/sqlalchemy/testing/suite/test_cte.py +lib/sqlalchemy/testing/suite/test_ddl.py +lib/sqlalchemy/testing/suite/test_dialect.py +lib/sqlalchemy/testing/suite/test_insert.py +lib/sqlalchemy/testing/suite/test_reflection.py +lib/sqlalchemy/testing/suite/test_results.py +lib/sqlalchemy/testing/suite/test_select.py +lib/sqlalchemy/testing/suite/test_sequence.py +lib/sqlalchemy/testing/suite/test_types.py +lib/sqlalchemy/testing/suite/test_update_delete.py +lib/sqlalchemy/util/__init__.py +lib/sqlalchemy/util/_collections.py +lib/sqlalchemy/util/compat.py +lib/sqlalchemy/util/deprecations.py +lib/sqlalchemy/util/langhelpers.py +lib/sqlalchemy/util/queue.py +lib/sqlalchemy/util/topological.py +test/__init__.py +test/binary_data_one.dat +test/binary_data_two.dat +test/conftest.py 
+test/requirements.py +test/aaa_profiling/__init__.py +test/aaa_profiling/test_compiler.py +test/aaa_profiling/test_memusage.py +test/aaa_profiling/test_misc.py +test/aaa_profiling/test_orm.py +test/aaa_profiling/test_pool.py +test/aaa_profiling/test_resultset.py +test/aaa_profiling/test_zoomark.py +test/aaa_profiling/test_zoomark_orm.py +test/base/__init__.py +test/base/test_dependency.py +test/base/test_events.py +test/base/test_except.py +test/base/test_inspect.py +test/base/test_tutorials.py +test/base/test_utils.py +test/dialect/__init__.py +test/dialect/test_all.py +test/dialect/test_firebird.py +test/dialect/test_mxodbc.py +test/dialect/test_pyodbc.py +test/dialect/test_sqlite.py +test/dialect/test_suite.py +test/dialect/test_sybase.py +test/dialect/mssql/__init__.py +test/dialect/mssql/test_compiler.py +test/dialect/mssql/test_engine.py +test/dialect/mssql/test_query.py +test/dialect/mssql/test_reflection.py +test/dialect/mssql/test_types.py +test/dialect/mysql/__init__.py +test/dialect/mysql/test_compiler.py +test/dialect/mysql/test_dialect.py +test/dialect/mysql/test_for_update.py +test/dialect/mysql/test_on_duplicate.py +test/dialect/mysql/test_query.py +test/dialect/mysql/test_reflection.py +test/dialect/mysql/test_types.py +test/dialect/oracle/__init__.py +test/dialect/oracle/test_compiler.py +test/dialect/oracle/test_dialect.py +test/dialect/oracle/test_reflection.py +test/dialect/oracle/test_types.py +test/dialect/postgresql/__init__.py +test/dialect/postgresql/test_compiler.py +test/dialect/postgresql/test_dialect.py +test/dialect/postgresql/test_on_conflict.py +test/dialect/postgresql/test_query.py +test/dialect/postgresql/test_reflection.py +test/dialect/postgresql/test_types.py +test/engine/__init__.py +test/engine/test_bind.py +test/engine/test_ddlevents.py +test/engine/test_deprecations.py +test/engine/test_execute.py +test/engine/test_logging.py +test/engine/test_parseconnect.py +test/engine/test_pool.py +test/engine/test_processors.py +test/engine/test_reconnect.py +test/engine/test_reflection.py +test/engine/test_transaction.py +test/ext/__init__.py +test/ext/test_associationproxy.py +test/ext/test_automap.py +test/ext/test_baked.py +test/ext/test_compiler.py +test/ext/test_deprecations.py +test/ext/test_extendedattr.py +test/ext/test_horizontal_shard.py +test/ext/test_hybrid.py +test/ext/test_indexable.py +test/ext/test_mutable.py +test/ext/test_orderinglist.py +test/ext/test_serializer.py +test/ext/declarative/__init__.py +test/ext/declarative/test_basic.py +test/ext/declarative/test_clsregistry.py +test/ext/declarative/test_concurrency.py +test/ext/declarative/test_inheritance.py +test/ext/declarative/test_mixin.py +test/ext/declarative/test_reflection.py +test/orm/__init__.py +test/orm/_fixtures.py +test/orm/test_ac_relationships.py +test/orm/test_association.py +test/orm/test_assorted_eager.py +test/orm/test_attributes.py +test/orm/test_backref_mutations.py +test/orm/test_bind.py +test/orm/test_bulk.py +test/orm/test_bundle.py +test/orm/test_cascade.py +test/orm/test_collection.py +test/orm/test_compile.py +test/orm/test_composites.py +test/orm/test_cycles.py +test/orm/test_default_strategies.py +test/orm/test_defaults.py +test/orm/test_deferred.py +test/orm/test_deprecations.py +test/orm/test_descriptor.py +test/orm/test_dynamic.py +test/orm/test_eager_relations.py +test/orm/test_evaluator.py +test/orm/test_events.py +test/orm/test_expire.py +test/orm/test_froms.py +test/orm/test_generative.py +test/orm/test_hasparent.py +test/orm/test_immediate_load.py 
+test/orm/test_inspect.py +test/orm/test_instrumentation.py +test/orm/test_joins.py +test/orm/test_lazy_relations.py +test/orm/test_load_on_fks.py +test/orm/test_loading.py +test/orm/test_lockmode.py +test/orm/test_manytomany.py +test/orm/test_mapper.py +test/orm/test_merge.py +test/orm/test_naturalpks.py +test/orm/test_of_type.py +test/orm/test_onetoone.py +test/orm/test_options.py +test/orm/test_pickled.py +test/orm/test_query.py +test/orm/test_rel_fn.py +test/orm/test_relationships.py +test/orm/test_scoping.py +test/orm/test_selectable.py +test/orm/test_selectin_relations.py +test/orm/test_session.py +test/orm/test_subquery_relations.py +test/orm/test_sync.py +test/orm/test_transaction.py +test/orm/test_unitofwork.py +test/orm/test_unitofworkv2.py +test/orm/test_update_delete.py +test/orm/test_utils.py +test/orm/test_validators.py +test/orm/test_versioning.py +test/orm/inheritance/__init__.py +test/orm/inheritance/_poly_fixtures.py +test/orm/inheritance/test_abc_inheritance.py +test/orm/inheritance/test_abc_polymorphic.py +test/orm/inheritance/test_assorted_poly.py +test/orm/inheritance/test_basic.py +test/orm/inheritance/test_concrete.py +test/orm/inheritance/test_magazine.py +test/orm/inheritance/test_manytomany.py +test/orm/inheritance/test_poly_linked_list.py +test/orm/inheritance/test_poly_loading.py +test/orm/inheritance/test_poly_persistence.py +test/orm/inheritance/test_polymorphic_rel.py +test/orm/inheritance/test_productspec.py +test/orm/inheritance/test_relationship.py +test/orm/inheritance/test_selects.py +test/orm/inheritance/test_single.py +test/orm/inheritance/test_with_poly.py +test/perf/invalidate_stresstest.py +test/perf/orm2010.py +test/sql/__init__.py +test/sql/test_case_statement.py +test/sql/test_compiler.py +test/sql/test_computed.py +test/sql/test_constraints.py +test/sql/test_cte.py +test/sql/test_ddlemit.py +test/sql/test_defaults.py +test/sql/test_delete.py +test/sql/test_deprecations.py +test/sql/test_functions.py +test/sql/test_generative.py +test/sql/test_insert.py +test/sql/test_insert_exec.py +test/sql/test_inspect.py +test/sql/test_join_rewriting.py +test/sql/test_labels.py +test/sql/test_lateral.py +test/sql/test_metadata.py +test/sql/test_operators.py +test/sql/test_query.py +test/sql/test_quote.py +test/sql/test_resultset.py +test/sql/test_returning.py +test/sql/test_rowcount.py +test/sql/test_selectable.py +test/sql/test_tablesample.py +test/sql/test_text.py +test/sql/test_type_expressions.py +test/sql/test_types.py +test/sql/test_unicode.py +test/sql/test_update.py +test/sql/test_utils.py \ No newline at end of file diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/dependency_links.txt b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/installed-files.txt b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/installed-files.txt new file mode 100644 index 0000000..3d69885 --- /dev/null +++ b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/installed-files.txt @@ -0,0 +1,394 @@ +../sqlalchemy/__init__.py +../sqlalchemy/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/__pycache__/events.cpython-37.pyc +../sqlalchemy/__pycache__/exc.cpython-37.pyc +../sqlalchemy/__pycache__/inspection.cpython-37.pyc 
+../sqlalchemy/__pycache__/interfaces.cpython-37.pyc +../sqlalchemy/__pycache__/log.cpython-37.pyc +../sqlalchemy/__pycache__/processors.cpython-37.pyc +../sqlalchemy/__pycache__/schema.cpython-37.pyc +../sqlalchemy/__pycache__/types.cpython-37.pyc +../sqlalchemy/connectors/__init__.py +../sqlalchemy/connectors/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/connectors/__pycache__/mxodbc.cpython-37.pyc +../sqlalchemy/connectors/__pycache__/pyodbc.cpython-37.pyc +../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-37.pyc +../sqlalchemy/connectors/mxodbc.py +../sqlalchemy/connectors/pyodbc.py +../sqlalchemy/connectors/zxJDBC.py +../sqlalchemy/cprocessors.cpython-37m-x86_64-linux-gnu.so +../sqlalchemy/cresultproxy.cpython-37m-x86_64-linux-gnu.so +../sqlalchemy/cutils.cpython-37m-x86_64-linux-gnu.so +../sqlalchemy/databases/__init__.py +../sqlalchemy/databases/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/__init__.py +../sqlalchemy/dialects/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/firebird/__init__.py +../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/firebird/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-37.pyc +../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-37.pyc +../sqlalchemy/dialects/firebird/base.py +../sqlalchemy/dialects/firebird/fdb.py +../sqlalchemy/dialects/firebird/kinterbasdb.py +../sqlalchemy/dialects/mssql/__init__.py +../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-37.pyc +../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-37.pyc +../sqlalchemy/dialects/mssql/adodbapi.py +../sqlalchemy/dialects/mssql/base.py +../sqlalchemy/dialects/mssql/information_schema.py +../sqlalchemy/dialects/mssql/mxodbc.py +../sqlalchemy/dialects/mssql/pymssql.py +../sqlalchemy/dialects/mssql/pyodbc.py +../sqlalchemy/dialects/mssql/zxjdbc.py +../sqlalchemy/dialects/mysql/__init__.py +../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/dml.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/json.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/types.cpython-37.pyc +../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-37.pyc +../sqlalchemy/dialects/mysql/base.py +../sqlalchemy/dialects/mysql/cymysql.py +../sqlalchemy/dialects/mysql/dml.py +../sqlalchemy/dialects/mysql/enumerated.py +../sqlalchemy/dialects/mysql/gaerdbms.py 
+../sqlalchemy/dialects/mysql/json.py +../sqlalchemy/dialects/mysql/mysqlconnector.py +../sqlalchemy/dialects/mysql/mysqldb.py +../sqlalchemy/dialects/mysql/oursql.py +../sqlalchemy/dialects/mysql/pymysql.py +../sqlalchemy/dialects/mysql/pyodbc.py +../sqlalchemy/dialects/mysql/reflection.py +../sqlalchemy/dialects/mysql/types.py +../sqlalchemy/dialects/mysql/zxjdbc.py +../sqlalchemy/dialects/oracle/__init__.py +../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/oracle/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-37.pyc +../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-37.pyc +../sqlalchemy/dialects/oracle/base.py +../sqlalchemy/dialects/oracle/cx_oracle.py +../sqlalchemy/dialects/oracle/zxjdbc.py +../sqlalchemy/dialects/postgresql/__init__.py +../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/array.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-37.pyc +../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-37.pyc +../sqlalchemy/dialects/postgresql/array.py +../sqlalchemy/dialects/postgresql/base.py +../sqlalchemy/dialects/postgresql/dml.py +../sqlalchemy/dialects/postgresql/ext.py +../sqlalchemy/dialects/postgresql/hstore.py +../sqlalchemy/dialects/postgresql/json.py +../sqlalchemy/dialects/postgresql/pg8000.py +../sqlalchemy/dialects/postgresql/psycopg2.py +../sqlalchemy/dialects/postgresql/psycopg2cffi.py +../sqlalchemy/dialects/postgresql/pygresql.py +../sqlalchemy/dialects/postgresql/pypostgresql.py +../sqlalchemy/dialects/postgresql/ranges.py +../sqlalchemy/dialects/postgresql/zxjdbc.py +../sqlalchemy/dialects/sqlite/__init__.py +../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/sqlite/__pycache__/json.cpython-37.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-37.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-37.pyc +../sqlalchemy/dialects/sqlite/base.py +../sqlalchemy/dialects/sqlite/json.py +../sqlalchemy/dialects/sqlite/pysqlcipher.py +../sqlalchemy/dialects/sqlite/pysqlite.py +../sqlalchemy/dialects/sybase/__init__.py +../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/dialects/sybase/__pycache__/base.cpython-37.pyc +../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-37.pyc +../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-37.pyc +../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-37.pyc +../sqlalchemy/dialects/sybase/base.py +../sqlalchemy/dialects/sybase/mxodbc.py +../sqlalchemy/dialects/sybase/pyodbc.py +../sqlalchemy/dialects/sybase/pysybase.py +../sqlalchemy/engine/__init__.py 
+../sqlalchemy/engine/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/engine/__pycache__/base.cpython-37.pyc +../sqlalchemy/engine/__pycache__/default.cpython-37.pyc +../sqlalchemy/engine/__pycache__/interfaces.cpython-37.pyc +../sqlalchemy/engine/__pycache__/reflection.cpython-37.pyc +../sqlalchemy/engine/__pycache__/result.cpython-37.pyc +../sqlalchemy/engine/__pycache__/strategies.cpython-37.pyc +../sqlalchemy/engine/__pycache__/threadlocal.cpython-37.pyc +../sqlalchemy/engine/__pycache__/url.cpython-37.pyc +../sqlalchemy/engine/__pycache__/util.cpython-37.pyc +../sqlalchemy/engine/base.py +../sqlalchemy/engine/default.py +../sqlalchemy/engine/interfaces.py +../sqlalchemy/engine/reflection.py +../sqlalchemy/engine/result.py +../sqlalchemy/engine/strategies.py +../sqlalchemy/engine/threadlocal.py +../sqlalchemy/engine/url.py +../sqlalchemy/engine/util.py +../sqlalchemy/event/__init__.py +../sqlalchemy/event/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/event/__pycache__/api.cpython-37.pyc +../sqlalchemy/event/__pycache__/attr.cpython-37.pyc +../sqlalchemy/event/__pycache__/base.cpython-37.pyc +../sqlalchemy/event/__pycache__/legacy.cpython-37.pyc +../sqlalchemy/event/__pycache__/registry.cpython-37.pyc +../sqlalchemy/event/api.py +../sqlalchemy/event/attr.py +../sqlalchemy/event/base.py +../sqlalchemy/event/legacy.py +../sqlalchemy/event/registry.py +../sqlalchemy/events.py +../sqlalchemy/exc.py +../sqlalchemy/ext/__init__.py +../sqlalchemy/ext/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/ext/__pycache__/associationproxy.cpython-37.pyc +../sqlalchemy/ext/__pycache__/automap.cpython-37.pyc +../sqlalchemy/ext/__pycache__/baked.cpython-37.pyc +../sqlalchemy/ext/__pycache__/compiler.cpython-37.pyc +../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-37.pyc +../sqlalchemy/ext/__pycache__/hybrid.cpython-37.pyc +../sqlalchemy/ext/__pycache__/indexable.cpython-37.pyc +../sqlalchemy/ext/__pycache__/instrumentation.cpython-37.pyc +../sqlalchemy/ext/__pycache__/mutable.cpython-37.pyc +../sqlalchemy/ext/__pycache__/orderinglist.cpython-37.pyc +../sqlalchemy/ext/__pycache__/serializer.cpython-37.pyc +../sqlalchemy/ext/associationproxy.py +../sqlalchemy/ext/automap.py +../sqlalchemy/ext/baked.py +../sqlalchemy/ext/compiler.py +../sqlalchemy/ext/declarative/__init__.py +../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/ext/declarative/__pycache__/api.cpython-37.pyc +../sqlalchemy/ext/declarative/__pycache__/base.cpython-37.pyc +../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-37.pyc +../sqlalchemy/ext/declarative/api.py +../sqlalchemy/ext/declarative/base.py +../sqlalchemy/ext/declarative/clsregistry.py +../sqlalchemy/ext/horizontal_shard.py +../sqlalchemy/ext/hybrid.py +../sqlalchemy/ext/indexable.py +../sqlalchemy/ext/instrumentation.py +../sqlalchemy/ext/mutable.py +../sqlalchemy/ext/orderinglist.py +../sqlalchemy/ext/serializer.py +../sqlalchemy/inspection.py +../sqlalchemy/interfaces.py +../sqlalchemy/log.py +../sqlalchemy/orm/__init__.py +../sqlalchemy/orm/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/orm/__pycache__/attributes.cpython-37.pyc +../sqlalchemy/orm/__pycache__/base.cpython-37.pyc +../sqlalchemy/orm/__pycache__/collections.cpython-37.pyc +../sqlalchemy/orm/__pycache__/dependency.cpython-37.pyc +../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-37.pyc +../sqlalchemy/orm/__pycache__/descriptor_props.cpython-37.pyc +../sqlalchemy/orm/__pycache__/dynamic.cpython-37.pyc 
+../sqlalchemy/orm/__pycache__/evaluator.cpython-37.pyc +../sqlalchemy/orm/__pycache__/events.cpython-37.pyc +../sqlalchemy/orm/__pycache__/exc.cpython-37.pyc +../sqlalchemy/orm/__pycache__/identity.cpython-37.pyc +../sqlalchemy/orm/__pycache__/instrumentation.cpython-37.pyc +../sqlalchemy/orm/__pycache__/interfaces.cpython-37.pyc +../sqlalchemy/orm/__pycache__/loading.cpython-37.pyc +../sqlalchemy/orm/__pycache__/mapper.cpython-37.pyc +../sqlalchemy/orm/__pycache__/path_registry.cpython-37.pyc +../sqlalchemy/orm/__pycache__/persistence.cpython-37.pyc +../sqlalchemy/orm/__pycache__/properties.cpython-37.pyc +../sqlalchemy/orm/__pycache__/query.cpython-37.pyc +../sqlalchemy/orm/__pycache__/relationships.cpython-37.pyc +../sqlalchemy/orm/__pycache__/scoping.cpython-37.pyc +../sqlalchemy/orm/__pycache__/session.cpython-37.pyc +../sqlalchemy/orm/__pycache__/state.cpython-37.pyc +../sqlalchemy/orm/__pycache__/strategies.cpython-37.pyc +../sqlalchemy/orm/__pycache__/strategy_options.cpython-37.pyc +../sqlalchemy/orm/__pycache__/sync.cpython-37.pyc +../sqlalchemy/orm/__pycache__/unitofwork.cpython-37.pyc +../sqlalchemy/orm/__pycache__/util.cpython-37.pyc +../sqlalchemy/orm/attributes.py +../sqlalchemy/orm/base.py +../sqlalchemy/orm/collections.py +../sqlalchemy/orm/dependency.py +../sqlalchemy/orm/deprecated_interfaces.py +../sqlalchemy/orm/descriptor_props.py +../sqlalchemy/orm/dynamic.py +../sqlalchemy/orm/evaluator.py +../sqlalchemy/orm/events.py +../sqlalchemy/orm/exc.py +../sqlalchemy/orm/identity.py +../sqlalchemy/orm/instrumentation.py +../sqlalchemy/orm/interfaces.py +../sqlalchemy/orm/loading.py +../sqlalchemy/orm/mapper.py +../sqlalchemy/orm/path_registry.py +../sqlalchemy/orm/persistence.py +../sqlalchemy/orm/properties.py +../sqlalchemy/orm/query.py +../sqlalchemy/orm/relationships.py +../sqlalchemy/orm/scoping.py +../sqlalchemy/orm/session.py +../sqlalchemy/orm/state.py +../sqlalchemy/orm/strategies.py +../sqlalchemy/orm/strategy_options.py +../sqlalchemy/orm/sync.py +../sqlalchemy/orm/unitofwork.py +../sqlalchemy/orm/util.py +../sqlalchemy/pool/__init__.py +../sqlalchemy/pool/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/pool/__pycache__/base.cpython-37.pyc +../sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-37.pyc +../sqlalchemy/pool/__pycache__/impl.cpython-37.pyc +../sqlalchemy/pool/base.py +../sqlalchemy/pool/dbapi_proxy.py +../sqlalchemy/pool/impl.py +../sqlalchemy/processors.py +../sqlalchemy/schema.py +../sqlalchemy/sql/__init__.py +../sqlalchemy/sql/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/sql/__pycache__/annotation.cpython-37.pyc +../sqlalchemy/sql/__pycache__/base.cpython-37.pyc +../sqlalchemy/sql/__pycache__/compiler.cpython-37.pyc +../sqlalchemy/sql/__pycache__/crud.cpython-37.pyc +../sqlalchemy/sql/__pycache__/ddl.cpython-37.pyc +../sqlalchemy/sql/__pycache__/default_comparator.cpython-37.pyc +../sqlalchemy/sql/__pycache__/dml.cpython-37.pyc +../sqlalchemy/sql/__pycache__/elements.cpython-37.pyc +../sqlalchemy/sql/__pycache__/expression.cpython-37.pyc +../sqlalchemy/sql/__pycache__/functions.cpython-37.pyc +../sqlalchemy/sql/__pycache__/naming.cpython-37.pyc +../sqlalchemy/sql/__pycache__/operators.cpython-37.pyc +../sqlalchemy/sql/__pycache__/schema.cpython-37.pyc +../sqlalchemy/sql/__pycache__/selectable.cpython-37.pyc +../sqlalchemy/sql/__pycache__/sqltypes.cpython-37.pyc +../sqlalchemy/sql/__pycache__/type_api.cpython-37.pyc +../sqlalchemy/sql/__pycache__/util.cpython-37.pyc +../sqlalchemy/sql/__pycache__/visitors.cpython-37.pyc 
+../sqlalchemy/sql/annotation.py +../sqlalchemy/sql/base.py +../sqlalchemy/sql/compiler.py +../sqlalchemy/sql/crud.py +../sqlalchemy/sql/ddl.py +../sqlalchemy/sql/default_comparator.py +../sqlalchemy/sql/dml.py +../sqlalchemy/sql/elements.py +../sqlalchemy/sql/expression.py +../sqlalchemy/sql/functions.py +../sqlalchemy/sql/naming.py +../sqlalchemy/sql/operators.py +../sqlalchemy/sql/schema.py +../sqlalchemy/sql/selectable.py +../sqlalchemy/sql/sqltypes.py +../sqlalchemy/sql/type_api.py +../sqlalchemy/sql/util.py +../sqlalchemy/sql/visitors.py +../sqlalchemy/testing/__init__.py +../sqlalchemy/testing/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/testing/__pycache__/assertions.cpython-37.pyc +../sqlalchemy/testing/__pycache__/assertsql.cpython-37.pyc +../sqlalchemy/testing/__pycache__/config.cpython-37.pyc +../sqlalchemy/testing/__pycache__/engines.cpython-37.pyc +../sqlalchemy/testing/__pycache__/entities.cpython-37.pyc +../sqlalchemy/testing/__pycache__/exclusions.cpython-37.pyc +../sqlalchemy/testing/__pycache__/fixtures.cpython-37.pyc +../sqlalchemy/testing/__pycache__/mock.cpython-37.pyc +../sqlalchemy/testing/__pycache__/pickleable.cpython-37.pyc +../sqlalchemy/testing/__pycache__/profiling.cpython-37.pyc +../sqlalchemy/testing/__pycache__/provision.cpython-37.pyc +../sqlalchemy/testing/__pycache__/replay_fixture.cpython-37.pyc +../sqlalchemy/testing/__pycache__/requirements.cpython-37.pyc +../sqlalchemy/testing/__pycache__/schema.cpython-37.pyc +../sqlalchemy/testing/__pycache__/util.cpython-37.pyc +../sqlalchemy/testing/__pycache__/warnings.cpython-37.pyc +../sqlalchemy/testing/assertions.py +../sqlalchemy/testing/assertsql.py +../sqlalchemy/testing/config.py +../sqlalchemy/testing/engines.py +../sqlalchemy/testing/entities.py +../sqlalchemy/testing/exclusions.py +../sqlalchemy/testing/fixtures.py +../sqlalchemy/testing/mock.py +../sqlalchemy/testing/pickleable.py +../sqlalchemy/testing/plugin/__init__.py +../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-37.pyc +../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-37.pyc +../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-37.pyc +../sqlalchemy/testing/plugin/bootstrap.py +../sqlalchemy/testing/plugin/plugin_base.py +../sqlalchemy/testing/plugin/pytestplugin.py +../sqlalchemy/testing/profiling.py +../sqlalchemy/testing/provision.py +../sqlalchemy/testing/replay_fixture.py +../sqlalchemy/testing/requirements.py +../sqlalchemy/testing/schema.py +../sqlalchemy/testing/suite/__init__.py +../sqlalchemy/testing/suite/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_cte.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_results.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_select.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_types.cpython-37.pyc +../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-37.pyc +../sqlalchemy/testing/suite/test_cte.py +../sqlalchemy/testing/suite/test_ddl.py +../sqlalchemy/testing/suite/test_dialect.py +../sqlalchemy/testing/suite/test_insert.py +../sqlalchemy/testing/suite/test_reflection.py 
+../sqlalchemy/testing/suite/test_results.py +../sqlalchemy/testing/suite/test_select.py +../sqlalchemy/testing/suite/test_sequence.py +../sqlalchemy/testing/suite/test_types.py +../sqlalchemy/testing/suite/test_update_delete.py +../sqlalchemy/testing/util.py +../sqlalchemy/testing/warnings.py +../sqlalchemy/types.py +../sqlalchemy/util/__init__.py +../sqlalchemy/util/__pycache__/__init__.cpython-37.pyc +../sqlalchemy/util/__pycache__/_collections.cpython-37.pyc +../sqlalchemy/util/__pycache__/compat.cpython-37.pyc +../sqlalchemy/util/__pycache__/deprecations.cpython-37.pyc +../sqlalchemy/util/__pycache__/langhelpers.cpython-37.pyc +../sqlalchemy/util/__pycache__/queue.cpython-37.pyc +../sqlalchemy/util/__pycache__/topological.cpython-37.pyc +../sqlalchemy/util/_collections.py +../sqlalchemy/util/compat.py +../sqlalchemy/util/deprecations.py +../sqlalchemy/util/langhelpers.py +../sqlalchemy/util/queue.py +../sqlalchemy/util/topological.py +PKG-INFO +SOURCES.txt +dependency_links.txt +requires.txt +top_level.txt diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/requires.txt b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/requires.txt new file mode 100644 index 0000000..7d73cfc --- /dev/null +++ b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/requires.txt @@ -0,0 +1,30 @@ + +[mssql] +pyodbc + +[mssql_pymssql] +pymssql + +[mssql_pyodbc] +pyodbc + +[mysql] +mysqlclient + +[oracle] +cx_oracle + +[postgresql] +psycopg2 + +[postgresql_pg8000] +pg8000 + +[postgresql_psycopg2binary] +psycopg2-binary + +[postgresql_psycopg2cffi] +psycopg2cffi + +[pymysql] +pymysql diff --git a/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/top_level.txt b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/venv/lib/python3.7/site-packages/SQLAlchemy-1.3.11-py3.7.egg-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/venv/lib/python3.7/site-packages/_cffi_backend.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/_cffi_backend.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..7b041a6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/_cffi_backend.cpython-37m-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/LICENSE.txt b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/LICENSE.txt new file mode 100644 index 0000000..7082a2d --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
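The egg-info and dist-info directories added under venv/ in this diff are the metadata pip writes at install time: requires.txt declares the optional extras (the [postgresql], [mysql], [mssql] sections shown above for SQLAlchemy), top_level.txt names the importable package, and RECORD / installed-files.txt inventory every installed file. As a minimal sketch, not part of the diff itself, the same data can be read programmatically, assuming Python 3.8+ where importlib.metadata is in the standard library (on the Python 3.7 venv shown here the importlib_metadata backport exposes the same API):

.. code-block:: python

    # Sketch: inspect the dist-info / egg-info metadata added in this diff.
    # Requires the packages to actually be installed in the running environment.
    from importlib import metadata

    # Version and core fields come from the METADATA / PKG-INFO file.
    print(metadata.version("aiohttp"))                      # e.g. "3.5.4"
    print(metadata.metadata("aiohttp")["Requires-Python"])  # e.g. ">=3.5.3"

    # requires() reflects requires.txt, including the optional extras sections.
    for req in metadata.requires("SQLAlchemy") or []:
        print(req)  # e.g. 'psycopg2; extra == "postgresql"'

    # files() is backed by the RECORD / installed-files listing.
    installed = metadata.files("aiohttp") or []
    print(len(installed), "installed files recorded")

This is the same information surfaced by ``pip show aiohttp``; pip regenerates all of these files on install, which is why a committed venv/ carries them along verbatim.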
diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/METADATA b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/METADATA new file mode 100644 index 0000000..0476359 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/METADATA @@ -0,0 +1,433 @@ +Metadata-Version: 2.1 +Name: aiohttp +Version: 3.5.4 +Summary: Async http client/server framework (asyncio) +Home-page: https://github.com/aio-libs/aiohttp +Author: Nikolay Kim +Author-email: fafhrd91@gmail.com +Maintainer: Nikolay Kim , Andrew Svetlov +Maintainer-email: aio-libs@googlegroups.com +License: Apache 2 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp +Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp +Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp +Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp +Project-URL: Docs: RTD, https://docs.aiohttp.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Requires-Python: >=3.5.3 +Requires-Dist: attrs (>=17.3.0) +Requires-Dist: chardet (<4.0,>=2.0) +Requires-Dist: multidict (<5.0,>=4.0) +Requires-Dist: async-timeout (<4.0,>=3.0) +Requires-Dist: yarl (<2.0,>=1.0) +Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7" +Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7" +Provides-Extra: speedups +Requires-Dist: aiodns ; extra == 'speedups' +Requires-Dist: brotlipy ; extra == 'speedups' +Requires-Dist: cchardet ; extra == 'speedups' + +================================== +Async http client/server framework +================================== + +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png + :height: 64px + :width: 64px + :alt: aiohttp logo + +| + +.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master + :target: https://travis-ci.com/aio-libs/aiohttp + :align: right + :alt: Travis status for master branch + +.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true + :target: https://ci.appveyor.com/project/aio-libs/aiohttp + :align: right + :alt: AppVeyor status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiohttp + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiohttp.svg + :target: https://pypi.org/project/aiohttp + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest + :target: https://docs.aiohttp.org/ + :alt: Latest Read The Docs + +.. 
image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +Key Features +============ + +- Supports both client and server side of HTTP protocol. +- Supports both client and server Web-Sockets out-of-the-box and avoids + Callback Hell. +- Provides Web-server with middlewares and pluggable routing. + + +Getting started +=============== + +Client +------ + +To get something from the web: + +.. code-block:: python + + import aiohttp + import asyncio + + async def fetch(session, url): + async with session.get(url) as response: + return await response.text() + + async def main(): + async with aiohttp.ClientSession() as session: + html = await fetch(session, 'http://python.org') + print(html) + + if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) + + +Server +------ + +An example using a simple server: + +.. code-block:: python + + # examples/server_simple.py + from aiohttp import web + + async def handle(request): + name = request.match_info.get('name', "Anonymous") + text = "Hello, " + name + return web.Response(text=text) + + async def wshandle(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == web.WSMsgType.text: + await ws.send_str("Hello, {}".format(msg.data)) + elif msg.type == web.WSMsgType.binary: + await ws.send_bytes(msg.data) + elif msg.type == web.WSMsgType.close: + break + + return ws + + + app = web.Application() + app.add_routes([web.get('/', handle), + web.get('/echo', wshandle), + web.get('/{name}', handle)]) + + web.run_app(app) + + +Documentation +============= + +https://aiohttp.readthedocs.io/ + + +Demos +===== + +https://github.com/aio-libs/aiohttp-demos + + +External links +============== + +* `Third party libraries + `_ +* `Built with aiohttp + `_ +* `Powered by aiohttp + `_ + +Feel free to make a Pull Request for adding your link to these pages! + + +Communication channels +====================== + +*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs + +Feel free to post your questions and ideas here. + +*gitter chat* https://gitter.im/aio-libs/Lobby + +We support `Stack Overflow +`_. +Please add *aiohttp* tag to your question there. + +Requirements +============ + +- Python >= 3.5.3 +- async-timeout_ +- attrs_ +- chardet_ +- multidict_ +- yarl_ + +Optionally you may install the cChardet_ and aiodns_ libraries (highly +recommended for sake of speed). + +.. _chardet: https://pypi.python.org/pypi/chardet +.. _aiodns: https://pypi.python.org/pypi/aiodns +.. _attrs: https://github.com/python-attrs/attrs +.. _multidict: https://pypi.python.org/pypi/multidict +.. _yarl: https://pypi.python.org/pypi/yarl +.. _async-timeout: https://pypi.python.org/pypi/async_timeout +.. _cChardet: https://pypi.python.org/pypi/cchardet + +License +======= + +``aiohttp`` is offered under the Apache 2 license. + + +Keepsafe +======== + +The aiohttp community would like to thank Keepsafe +(https://www.getkeepsafe.com) for its support in the early days of +the project. + + +Source code +=========== + +The latest developer version is available in a GitHub repository: +https://github.com/aio-libs/aiohttp + +Benchmarks +========== + +If you are interested in efficiency, the AsyncIO community maintains a +list of benchmarks on the official wiki: +https://github.com/python/asyncio/wiki/Benchmarks + +========= +Changelog +========= + +.. 
+ You should *NOT* be adding new change log entries to this file, this + file is managed by towncrier. You *may* edit previous change logs to + fix problems like typo corrections or such. + To add a new change log entry, please see + https://pip.pypa.io/en/latest/development/#adding-a-news-entry + we named the news folder "changes". + + WARNING: Don't drop the next directive! + +.. towncrier release notes start + +3.5.4 (2019-01-12) +================== + +Bugfixes +-------- + +- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a + partial content only in case of compressed content + `#3525 `_ + + +3.5.3 (2019-01-10) +================== + +Bugfixes +-------- + +- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of ``access_log=True`` and the event loop being in debug mode. + `#3504 `_ +- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields + `#3511 `_ +- Send custom per-request cookies even if session jar is empty + `#3515 `_ +- Restore Linux binary wheels publishing on PyPI + +---- + + +3.5.2 (2019-01-08) +================== + +Features +-------- + +- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work with files asynchronously. + I/O based payloads from ``payload.py`` uses a ``ThreadPoolExecutor`` to work with I/O objects asynchronously. + `#3313 `_ +- Internal Server Errors in plain text if the browser does not support HTML. + `#3483 `_ + + +Bugfixes +-------- + +- Preserve MultipartWriter parts headers on write. + + Refactor the way how ``Payload.headers`` are handled. Payload instances now always + have headers and Content-Type defined. + + Fix Payload Content-Disposition header reset after initial creation. + `#3035 `_ +- Log suppressed exceptions in ``GunicornWebWorker``. + `#3464 `_ +- Remove wildcard imports. + `#3468 `_ +- Use the same task for app initialization and web server handling in gunicorn workers. + It allows to use Python3.7 context vars smoothly. + `#3471 `_ +- Fix handling of chunked+gzipped response when first chunk does not give uncompressed data + `#3477 `_ +- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to avoid a deprecation warning. + `#3480 `_ +- ``Payload.size`` type annotation changed from `Optional[float]` to `Optional[int]`. + `#3484 `_ +- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization. + `#3497 `_ + + +Improved Documentation +---------------------- + +- Add documentation for ``aiohttp.web.HTTPException``. + `#3490 `_ + + +Misc +---- + +- `#3487 `_ + + +---- + + +3.5.1 (2018-12-24) +==================== + +- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug + mode. + +3.5.0 (2018-12-22) +==================== + +Features +-------- + +- The library type annotations are checked in strict mode now. +- Add support for setting cookies for individual request (`#2387 `_) +- Application.add_domain implementation (`#2809 `_) +- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` + can now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174 `_) +- Make ``request.url`` accessible when transport is closed. (`#3177 `_) +- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression to run in a background executor to avoid blocking the main thread and potentially triggering health check failures. 
(`#3205 `_) +- Enable users to set `ClientTimeout` in `aiohttp.request` (`#3213 `_) +- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc`` file + doesn't exist. (`#3267 `_) +- Add default logging handler to web.run_app + + If the `Application.debug` flag is set and the default logger `aiohttp.access` is used, access logs will now be output using a `stderr` `StreamHandler` if no handlers are attached. Furthermore, if the default logger has no log level set, the log level will be set to `DEBUG`. (`#3324 `_) +- Add method argument to ``session.ws_connect()``. + + Sometimes server API requires a different HTTP method for WebSocket connection establishment. + + For example, ``Docker exec`` needs POST. (`#3378 `_) +- Create a task per request handling. (`#3406 `_) + + +Bugfixes +-------- + +- Enable passing `access_log_class` via `handler_args` (`#3158 `_) +- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186 `_) +- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response`` + constructor. (`#3207 `_) +- Don't uppercase HTTP method in parser (`#3233 `_) +- Make method match regexp RFC-7230 compliant (`#3235 `_) +- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 `_) +- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239 `_) +- Change imports from collections module in preparation for 3.8. (`#3258 `_) +- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265 `_) +- Fix forward compatibility with Python 3.8: importing ABCs directly from the collections module will not be supported anymore. (`#3273 `_) +- Keep the query string by `normalize_path_middleware`. (`#3278 `_) +- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290 `_) +- Bracket IPv6 addresses in the HOST header (`#3304 `_) +- Fix default message for server ping and pong frames. (`#3308 `_) +- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop def. (`#3337 `_) +- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function (`#3361 `_) +- Release HTTP response before raising status exception (`#3364 `_) +- Fix task cancellation when ``sendfile()`` syscall is used by static file handling. (`#3383 `_) +- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught + in the handler. (`#3414 `_) + + +Improved Documentation +---------------------- + +- Improve documentation of ``Application.make_handler`` parameters. (`#3152 `_) +- Fix BaseRequest.raw_headers doc. (`#3215 `_) +- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229 `_) +- Make server access log format placeholder %b documentation reflect + behavior and docstring. (`#3307 `_) + + +Deprecations and Removals +------------------------- + +- Deprecate modification of ``session.requote_redirect_url`` (`#2278 `_) +- Deprecate ``stream.unread_data()`` (`#3260 `_) +- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318 `_) +- Encourage creation of aiohttp public objects inside a coroutine (`#3331 `_) +- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken + for more than 2 years. (`#3358 `_) +- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop`` properties. (`#3374 `_) +- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381 `_) +- Deprecate body parameter in HTTPException (and derived classes) constructor. 
(`#3385 `_) +- Deprecate bare connector close, use ``async with connector:`` and ``await connector.close()`` instead. (`#3417 `_) +- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession`` constructor. (`#3438 `_) + + +Misc +---- + +- #3341, #3351 + diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/RECORD b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/RECORD new file mode 100644 index 0000000..79dc7a3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/RECORD @@ -0,0 +1,124 @@ +aiohttp-3.5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiohttp-3.5.4.dist-info/LICENSE.txt,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +aiohttp-3.5.4.dist-info/METADATA,sha256=vpBjLbRZ9Tbi4DEj6aDUlGbj-HJPHa8Wihktdh4Z9U0,16950 +aiohttp-3.5.4.dist-info/RECORD,, +aiohttp-3.5.4.dist-info/WHEEL,sha256=0XRAUr92PGDyTl_2nDyFZS1y0mC0Tb6FBjKSU09tHPA,109 +aiohttp-3.5.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 +aiohttp/__init__.py,sha256=GdkiBfeUQa38wScKQYLH4mRL-20MqARHC0ljV6Naf8w,4948 +aiohttp/__pycache__/__init__.cpython-37.pyc,, +aiohttp/__pycache__/abc.cpython-37.pyc,, +aiohttp/__pycache__/base_protocol.cpython-37.pyc,, +aiohttp/__pycache__/client.cpython-37.pyc,, +aiohttp/__pycache__/client_exceptions.cpython-37.pyc,, +aiohttp/__pycache__/client_proto.cpython-37.pyc,, +aiohttp/__pycache__/client_reqrep.cpython-37.pyc,, +aiohttp/__pycache__/client_ws.cpython-37.pyc,, +aiohttp/__pycache__/connector.cpython-37.pyc,, +aiohttp/__pycache__/cookiejar.cpython-37.pyc,, +aiohttp/__pycache__/formdata.cpython-37.pyc,, +aiohttp/__pycache__/frozenlist.cpython-37.pyc,, +aiohttp/__pycache__/hdrs.cpython-37.pyc,, +aiohttp/__pycache__/helpers.cpython-37.pyc,, +aiohttp/__pycache__/http.cpython-37.pyc,, +aiohttp/__pycache__/http_exceptions.cpython-37.pyc,, +aiohttp/__pycache__/http_parser.cpython-37.pyc,, +aiohttp/__pycache__/http_websocket.cpython-37.pyc,, +aiohttp/__pycache__/http_writer.cpython-37.pyc,, +aiohttp/__pycache__/locks.cpython-37.pyc,, +aiohttp/__pycache__/log.cpython-37.pyc,, +aiohttp/__pycache__/multipart.cpython-37.pyc,, +aiohttp/__pycache__/payload.cpython-37.pyc,, +aiohttp/__pycache__/payload_streamer.cpython-37.pyc,, +aiohttp/__pycache__/pytest_plugin.cpython-37.pyc,, +aiohttp/__pycache__/resolver.cpython-37.pyc,, +aiohttp/__pycache__/signals.cpython-37.pyc,, +aiohttp/__pycache__/streams.cpython-37.pyc,, +aiohttp/__pycache__/tcp_helpers.cpython-37.pyc,, +aiohttp/__pycache__/test_utils.cpython-37.pyc,, +aiohttp/__pycache__/tracing.cpython-37.pyc,, +aiohttp/__pycache__/typedefs.cpython-37.pyc,, +aiohttp/__pycache__/web.cpython-37.pyc,, +aiohttp/__pycache__/web_app.cpython-37.pyc,, +aiohttp/__pycache__/web_exceptions.cpython-37.pyc,, +aiohttp/__pycache__/web_fileresponse.cpython-37.pyc,, +aiohttp/__pycache__/web_log.cpython-37.pyc,, +aiohttp/__pycache__/web_middlewares.cpython-37.pyc,, +aiohttp/__pycache__/web_protocol.cpython-37.pyc,, +aiohttp/__pycache__/web_request.cpython-37.pyc,, +aiohttp/__pycache__/web_response.cpython-37.pyc,, +aiohttp/__pycache__/web_routedef.cpython-37.pyc,, +aiohttp/__pycache__/web_runner.cpython-37.pyc,, +aiohttp/__pycache__/web_server.cpython-37.pyc,, +aiohttp/__pycache__/web_urldispatcher.cpython-37.pyc,, +aiohttp/__pycache__/web_ws.cpython-37.pyc,, +aiohttp/__pycache__/worker.cpython-37.pyc,, +aiohttp/_cparser.pxd,sha256=tgw30SL6kQSczzGMlMhx2Cuhf_O8P8ZPimVCb85xILc,3959 
+aiohttp/_find_header.c,sha256=lWc5w3UZiVd3ni60DuFDSSPzsaQUhAQcERDGBOqeML8,189932 +aiohttp/_find_header.h,sha256=5oOgQ85nF6V7rpU8NhyE5vyGkTo1Cgf1GIYrtxSTzQI,170 +aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68 +aiohttp/_frozenlist.c,sha256=y98wE-b4rxP9GCcT-WCx5Pt_WU3RBrs-w-QeKZ7XE34,287338 +aiohttp/_frozenlist.cpython-37m-x86_64-linux-gnu.so,sha256=N43hZAJdoc0SQZnsQ72IIh3ODDDEZR8dcHArr6Wb0G4,310748 +aiohttp/_frozenlist.pyx,sha256=BD8LcERExsWdo4qzuuQ84f-L_pHVzkUQO0lEAOe3Fog,2605 +aiohttp/_headers.pxi,sha256=XgJL5FQRwL4uZQfegYShPclsErUlvG_xuMHs7dp_2-o,2027 +aiohttp/_helpers.c,sha256=QhGjJ2v5NuDgkX23Bmcs7HpowupE1tz1im8PuZdISbI,207048 +aiohttp/_helpers.cpython-37m-x86_64-linux-gnu.so,sha256=lobzEWAei1Lkxx1SFiFVHXm4eNxgIDJi03yEUfzZ3RM,213821 +aiohttp/_helpers.pyi,sha256=mJRb5YdG8DxYbPfVddGRGmi93qqaJM30L1qFpgSKQuA,204 +aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049 +aiohttp/_http_parser.c,sha256=WIyFvlOUcAmTRpBPeFl57lbj9hkQfuMWNTPDzOku-cc,994097 +aiohttp/_http_parser.cpython-37m-x86_64-linux-gnu.so,sha256=eRzfo0GowqCsVVxG_7s2qOmR4XiMUE1jKtqS5JMpqq8,1558531 +aiohttp/_http_parser.pyx,sha256=qAeXR88_UXU2ontoLIq7hg7M2KHjY982iJeH_u7aXXs,28672 +aiohttp/_http_writer.c,sha256=YyFijS8A3erHfOa8hb95PHfnZJnxEnLdHHkVME0o8bE,205820 +aiohttp/_http_writer.cpython-37m-x86_64-linux-gnu.so,sha256=CqASRWdGlkowylQgw8FMYyGT5BL48qc_zhRAxRcFhzo,204055 +aiohttp/_http_writer.pyx,sha256=vnanyXytNqyi6oqxELg5ARJ8LhtB8mDGxNfz6DdvH6E,4193 +aiohttp/_websocket.c,sha256=uMC3H3T9yqN7fsOcYC0gLNfcrk3QkV199WLaHoshW0U,135134 +aiohttp/_websocket.cpython-37m-x86_64-linux-gnu.so,sha256=LdzRUpovLfMOVKb6IezQvaZFTMvT7X8awC2bz3MZKt8,104661 +aiohttp/_websocket.pyx,sha256=tJfygcVwKF_Xb6Pg48a6t50YO2xY4Rg0Wj7LcJJMi-U,1559 +aiohttp/abc.py,sha256=lsf2bz-9KtqLhtI-e-tmgp3ynziMypYyEHvwOnFg7lQ,5392 +aiohttp/base_protocol.py,sha256=kv6AbDw8ZQOyB9Hm2bOaPZyXcAbUUwFOO2lbAmArpfw,2644 +aiohttp/client.py,sha256=hXh0WgGqhl80gVDlkuzgrHVaCxxkg_A9_mrhOkdSb-s,42549 +aiohttp/client_exceptions.py,sha256=3e7SWwDXDhUO5npOhwgdL6K8tXMTdVyv58rjQboY4Yo,7547 +aiohttp/client_proto.py,sha256=l1bLzhVx8hHOuS8nBPH6wNU15S-P6z_OMtpx_tPRi54,8001 +aiohttp/client_reqrep.py,sha256=LUhjuCGyJs55LcH_Sr3AMcAhS1XlcCPM73rc8C3_GV0,35793 +aiohttp/client_ws.py,sha256=AQlj-peBA0mGyra1t38sWlfV28MEM0SAATRXp1TsF9I,10694 +aiohttp/connector.py,sha256=AORmJFz8WLuAjca5O582FKCC74f6emuXdZfhWzvPpx4,39556 +aiohttp/cookiejar.py,sha256=ghkcBC9JhqKFz3InpJ4l2_stXLVv6qORX1303vepQUI,11268 +aiohttp/formdata.py,sha256=VZCo9kmDb50lQUcRMDfAH3d5lnRxBq_AX38ge8vFI00,5807 +aiohttp/frozenlist.py,sha256=I4zR368wRHXp402Z3f5lhd5i48b6A66MhHncW1JGkb4,1781 +aiohttp/frozenlist.pyi,sha256=fkQEKqDR6nOjXDx2cXvfCcetoMQQdzjXs2uoA7uVaP4,1431 +aiohttp/hdrs.py,sha256=iaXnHXOR_Dx0rvVkvmIZhc-7Egf2ByuSDI9tqskS0kQ,3449 +aiohttp/helpers.py,sha256=q_AZMU7hOJBvtTklhQpwa1DTH3uR5h2ZA0vLlsVGSQs,22633 +aiohttp/http.py,sha256=mYXbwDI8bF9D1RShF0EGtVTx7OgIyksbmKR4b_4RgBo,1385 +aiohttp/http_exceptions.py,sha256=yb2XryY_kktgiADcYn1nS0Dm-RVhhy0J6R0qfg-JyWo,2358 +aiohttp/http_parser.py,sha256=v9csKsBv-rmOir1ikRBcDJDAaPMsFen1HoP8_Viz6xE,27912 +aiohttp/http_websocket.py,sha256=GpysCWVOOQyRzvLSq0IHhVG0goWSnv5Rmwf91uUwowI,24594 +aiohttp/http_writer.py,sha256=XhGCqy_lzdLyxIzjQ_ufPFfJKTTWx1sb6YZWvrOFUPA,5239 +aiohttp/locks.py,sha256=l-cW8wUbIkHaovghT7gpY8Yp5Vlo-u2G7_CR5xQqEQ8,1234 +aiohttp/log.py,sha256=kOWU6EcyBQESISm27vc8dVEz_h9zxozLa5WCya1RzhQ,325 +aiohttp/multipart.py,sha256=h76ZKaEdP2moxWK0qNydR7zYMgGMoyqkkRssTmrtx1A,32277 
+aiohttp/payload.py,sha256=QjzdcLJ89GGqFSN_SdMgEvw_Id4UEXZ9mL_2fAGF4gk,14027 +aiohttp/payload_streamer.py,sha256=ZNWaWwAxOIricwfjH4-YrkCqehowVizM6fJ_JVDR480,2103 +aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6 +aiohttp/pytest_plugin.py,sha256=8KOUt8KXu_3NkPQ8DYwgqKfdAvVZ--zHnm0EQiKFPkI,10332 +aiohttp/resolver.py,sha256=pRF91jOjTNuCll5TMRjTe1OxnGZK4wjAggYLgvzXkGQ,3626 +aiohttp/signals.py,sha256=_ge2XQXBDWHoyCI4E-nXC-sOEJGVrJm0zYGHH0E5woQ,948 +aiohttp/signals.pyi,sha256=mrEA9Ve08W22L_yI8_F7PkdQUjid_VsL3o9tcC0Ud0E,325 +aiohttp/streams.py,sha256=i1Q7_RzolpEQ63AkalkeeSHsMPOaHAfjnwlxvRmYi-k,20371 +aiohttp/tcp_helpers.py,sha256=1WVYM2C-HZQpgcksTyadRsl2_WeuXh_ECUxCcwji5d8,1631 +aiohttp/test_utils.py,sha256=0a0034sQM72grdRxjTnYpHtkUvMwstshfc9jVPXsZ1U,20525 +aiohttp/tracing.py,sha256=yfOJWzRQgRdDcdjsDLqPul3anYyVFhztDeyoM01oIq8,12662 +aiohttp/typedefs.py,sha256=6HXEWJNZGUuNewFQUjSkCzKP8rQVZSKqfdNnIgofZWs,1259 +aiohttp/web.py,sha256=2edP5uK2BU6wTXAWzGp2lgYq_CyU3vzLaQa0I_Ehg_0,15121 +aiohttp/web_app.py,sha256=vKuHVhH9d-Qg5Pg1A8MbaZPeJttkSsghpuo2JYvUJks,17212 +aiohttp/web_exceptions.py,sha256=-CQI325lMa9W-1WeJ2RlHApOOQ74ctHd6OyeKG_EyT4,10079 +aiohttp/web_fileresponse.py,sha256=0Oln1kTqD80EhftG2jqVbsuSLr0Gbjpuk4T3D06fFjk,12712 +aiohttp/web_log.py,sha256=J33FXqV36hWcyk8YfFNXDj3SI40uoOQzEX2Fhni7bzc,8269 +aiohttp/web_middlewares.py,sha256=BY05dLo9rsRZttRmjDUHEokiHQLzW_ffENZL9q-Grf4,4188 +aiohttp/web_protocol.py,sha256=q0zEVHMSLdmUw_KdI6zVeOj_k3lLZWMj4PJHo8h9c54,21394 +aiohttp/web_request.py,sha256=M8ARRuEso-V7G675-xWY-lqLBGDmBVRGPujaufKZGuo,25234 +aiohttp/web_response.py,sha256=nmldFBqLLaCECoaYUw54-2BVHB6Xz6XgGMK0O5ymrjo,25511 +aiohttp/web_routedef.py,sha256=jQ8Y0hDHYuMBTtsuo17qjkQLBMoacbkh4zaUdwSJJ8s,6077 +aiohttp/web_runner.py,sha256=_LUDpAc6vDOWfNJ-DBj3NZPtID0gBPH6JeMXtGSt4OU,10088 +aiohttp/web_server.py,sha256=527MjryEIqWArFHMJlEABg3TcZgYtyJIFHY19Yvf3AI,2165 +aiohttp/web_urldispatcher.py,sha256=x-O0Tqxn6xqMdQ5Qrg0hxIli-DbOfxLEDpgX_j_FGQU,38788 +aiohttp/web_ws.py,sha256=7UpGsVFZw_YtpJOWPLeDnGmL6PtirxAkc8r-pGUQbt0,17082 +aiohttp/worker.py,sha256=hekSLWLEJVrHrIrZ3dQga7Jzgtx_Cf3ZW7Zfd1J1G3A,8178 diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/WHEEL b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/WHEEL new file mode 100644 index 0000000..f5f6b9e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: false +Tag: cp37-cp37m-manylinux1_x86_64 + diff --git a/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/top_level.txt new file mode 100644 index 0000000..ee4ba4f --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp-3.5.4.dist-info/top_level.txt @@ -0,0 +1 @@ +aiohttp diff --git a/venv/lib/python3.7/site-packages/aiohttp/__init__.py b/venv/lib/python3.7/site-packages/aiohttp/__init__.py new file mode 100644 index 0000000..84a9d39 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/__init__.py @@ -0,0 +1,216 @@ +__version__ = '3.5.4' + +from typing import Tuple # noqa + +from . 
import hdrs +from .client import ( + BaseConnector, + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponse, + ClientRequest, + ClientResponseError, + ClientSSLError, + ClientSession, + ClientTimeout, + ClientWebSocketResponse, + ContentTypeError, + Fingerprint, + InvalidURL, + RequestInfo, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + TCPConnector, + UnixConnector, + WSServerHandshakeError, + request +) + +from .cookiejar import CookieJar, DummyCookieJar +from .formdata import FormData +from .helpers import BasicAuth, ChainMapProxy +from .http import ( + HttpVersion, + HttpVersion10, + HttpVersion11, + WSMsgType, + WSCloseCode, + WSMessage, + WebSocketError +) + +from .multipart import ( + BadContentDispositionHeader, + BadContentDispositionParam, + BodyPartReader, + MultipartReader, + MultipartWriter, + content_disposition_filename, + parse_content_disposition +) + +from .payload import ( + AsyncIterablePayload, + BufferedReaderPayload, + BytesIOPayload, + BytesPayload, + IOBasePayload, + JsonPayload, + PAYLOAD_REGISTRY, + Payload, + StringIOPayload, + StringPayload, + TextIOPayload, + get_payload, + payload_type +) + +from .payload_streamer import streamer + +from .resolver import AsyncResolver, DefaultResolver, ThreadedResolver + +from .signals import Signal + +from .streams import ( + DataQueue, + EMPTY_PAYLOAD, + EofStream, + FlowControlDataQueue, + StreamReader +) + +from .tracing import ( + TraceConfig, + TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams, + TraceDnsCacheHitParams, + TraceDnsCacheMissParams, + TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams, + TraceRequestEndParams, + TraceRequestExceptionParams, + TraceRequestRedirectParams, + TraceRequestStartParams, + TraceResponseChunkReceivedParams +) + +__all__ = ( + 'hdrs', + # client + 'BaseConnector', + 'ClientConnectionError', + 'ClientConnectorCertificateError', + 'ClientConnectorError', + 'ClientConnectorSSLError', + 'ClientError', + 'ClientHttpProxyError', + 'ClientOSError', + 'ClientPayloadError', + 'ClientProxyConnectionError', + 'ClientResponse', + 'ClientRequest', + 'ClientResponseError', + 'ClientSSLError', + 'ClientSession', + 'ClientTimeout', + 'ClientWebSocketResponse', + 'ContentTypeError', + 'Fingerprint', + 'InvalidURL', + 'RequestInfo', + 'ServerConnectionError', + 'ServerDisconnectedError', + 'ServerFingerprintMismatch', + 'ServerTimeoutError', + 'TCPConnector', + 'UnixConnector', + 'WSServerHandshakeError', + 'request', + # cookiejar + 'CookieJar', + 'DummyCookieJar', + # formdata + 'FormData', + # helpers + 'BasicAuth', + 'ChainMapProxy', + # http + 'HttpVersion', + 'HttpVersion10', + 'HttpVersion11', + 'WSMsgType', + 'WSCloseCode', + 'WSMessage', + 'WebSocketError', + # multipart + 'BadContentDispositionHeader', + 'BadContentDispositionParam', + 'BodyPartReader', + 'MultipartReader', + 'MultipartWriter', + 'content_disposition_filename', + 'parse_content_disposition', + # payload + 'AsyncIterablePayload', + 'BufferedReaderPayload', + 'BytesIOPayload', + 'BytesPayload', + 'IOBasePayload', + 'JsonPayload', + 'PAYLOAD_REGISTRY', + 'Payload', + 'StringIOPayload', + 'StringPayload', + 'TextIOPayload', + 
'get_payload', + 'payload_type', + # payload_streamer + 'streamer', + # resolver + 'AsyncResolver', + 'DefaultResolver', + 'ThreadedResolver', + # signals + 'Signal', + 'DataQueue', + 'EMPTY_PAYLOAD', + 'EofStream', + 'FlowControlDataQueue', + 'StreamReader', + # tracing + 'TraceConfig', + 'TraceConnectionCreateEndParams', + 'TraceConnectionCreateStartParams', + 'TraceConnectionQueuedEndParams', + 'TraceConnectionQueuedStartParams', + 'TraceConnectionReuseconnParams', + 'TraceDnsCacheHitParams', + 'TraceDnsCacheMissParams', + 'TraceDnsResolveHostEndParams', + 'TraceDnsResolveHostStartParams', + 'TraceRequestChunkSentParams', + 'TraceRequestEndParams', + 'TraceRequestExceptionParams', + 'TraceRequestRedirectParams', + 'TraceRequestStartParams', + 'TraceResponseChunkReceivedParams', +) # type: Tuple[str, ...] + +try: + from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa + __all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker') +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.7/site-packages/aiohttp/_cparser.pxd b/venv/lib/python3.7/site-packages/aiohttp/_cparser.pxd new file mode 100644 index 0000000..0f9fc00 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_cparser.pxd @@ -0,0 +1,140 @@ +from libc.stdint cimport uint16_t, uint32_t, uint64_t + + +cdef extern from "../vendor/http-parser/http_parser.h": + ctypedef int (*http_data_cb) (http_parser*, + const char *at, + size_t length) except -1 + + ctypedef int (*http_cb) (http_parser*) except -1 + + struct http_parser: + unsigned int type + unsigned int flags + unsigned int state + unsigned int header_state + unsigned int index + + uint32_t nread + uint64_t content_length + + unsigned short http_major + unsigned short http_minor + unsigned int status_code + unsigned int method + unsigned int http_errno + + unsigned int upgrade + + void *data + + struct http_parser_settings: + http_cb on_message_begin + http_data_cb on_url + http_data_cb on_status + http_data_cb on_header_field + http_data_cb on_header_value + http_cb on_headers_complete + http_data_cb on_body + http_cb on_message_complete + http_cb on_chunk_header + http_cb on_chunk_complete + + enum http_parser_type: + HTTP_REQUEST, + HTTP_RESPONSE, + HTTP_BOTH + + enum http_errno: + HPE_OK, + HPE_CB_message_begin, + HPE_CB_url, + HPE_CB_header_field, + HPE_CB_header_value, + HPE_CB_headers_complete, + HPE_CB_body, + HPE_CB_message_complete, + HPE_CB_status, + HPE_CB_chunk_header, + HPE_CB_chunk_complete, + HPE_INVALID_EOF_STATE, + HPE_HEADER_OVERFLOW, + HPE_CLOSED_CONNECTION, + HPE_INVALID_VERSION, + HPE_INVALID_STATUS, + HPE_INVALID_METHOD, + HPE_INVALID_URL, + HPE_INVALID_HOST, + HPE_INVALID_PORT, + HPE_INVALID_PATH, + HPE_INVALID_QUERY_STRING, + HPE_INVALID_FRAGMENT, + HPE_LF_EXPECTED, + HPE_INVALID_HEADER_TOKEN, + HPE_INVALID_CONTENT_LENGTH, + HPE_INVALID_CHUNK_SIZE, + HPE_INVALID_CONSTANT, + HPE_INVALID_INTERNAL_STATE, + HPE_STRICT, + HPE_PAUSED, + HPE_UNKNOWN + + enum flags: + F_CHUNKED, + F_CONNECTION_KEEP_ALIVE, + F_CONNECTION_CLOSE, + F_CONNECTION_UPGRADE, + F_TRAILING, + F_UPGRADE, + F_SKIPBODY, + F_CONTENTLENGTH + + enum http_method: + DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY, + LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND, + REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE, + MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR, + LINK, UNLINK + + void http_parser_init(http_parser *parser, http_parser_type type) + + size_t http_parser_execute(http_parser *parser, + 
const http_parser_settings *settings, + const char *data, + size_t len) + + int http_should_keep_alive(const http_parser *parser) + + void http_parser_settings_init(http_parser_settings *settings) + + const char *http_errno_name(http_errno err) + const char *http_errno_description(http_errno err) + const char *http_method_str(http_method m) + + # URL Parser + + enum http_parser_url_fields: + UF_SCHEMA = 0, + UF_HOST = 1, + UF_PORT = 2, + UF_PATH = 3, + UF_QUERY = 4, + UF_FRAGMENT = 5, + UF_USERINFO = 6, + UF_MAX = 7 + + struct http_parser_url_field_data: + uint16_t off + uint16_t len + + struct http_parser_url: + uint16_t field_set + uint16_t port + http_parser_url_field_data[UF_MAX] field_data + + void http_parser_url_init(http_parser_url *u) + + int http_parser_parse_url(const char *buf, + size_t buflen, + int is_connect, + http_parser_url *u) diff --git a/venv/lib/python3.7/site-packages/aiohttp/_find_header.c b/venv/lib/python3.7/site-packages/aiohttp/_find_header.c new file mode 100644 index 0000000..fbc6c4f --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_find_header.c @@ -0,0 +1,10000 @@ +/* The file is autogenerated from aiohttp/hdrs.py +Run ./tools/gen.py to update it after the origin changing. */ + +#include "_find_header.h" + +#define NEXT_CHAR() \ +{ \ + count++; \ + if (count == size) { \ + /* end of search */ \ + return -1; \ + } \ + pchar++; \ + ch = *pchar; \ + last = (count == size -1); \ +} while(0); + +int +find_header(const char *str, int size) +{ + char *pchar = str; + int last; + char ch; + int count = -1; + pchar--; + +INITIAL: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto A; + case 'a': + if (last) { + return -1; + } + goto A; + case 'C': + if (last) { + return -1; + } + goto C; + case 'c': + if (last) { + return -1; + } + goto C; + case 'D': + if (last) { + return -1; + } + goto D; + case 'd': + if (last) { + return -1; + } + goto D; + case 'E': + if (last) { + return -1; + } + goto E; + case 'e': + if (last) { + return -1; + } + goto E; + case 'F': + if (last) { + return -1; + } + goto F; + case 'f': + if (last) { + return -1; + } + goto F; + case 'H': + if (last) { + return -1; + } + goto H; + case 'h': + if (last) { + return -1; + } + goto H; + case 'I': + if (last) { + return -1; + } + goto I; + case 'i': + if (last) { + return -1; + } + goto I; + case 'K': + if (last) { + return -1; + } + goto K; + case 'k': + if (last) { + return -1; + } + goto K; + case 'L': + if (last) { + return -1; + } + goto L; + case 'l': + if (last) { + return -1; + } + goto L; + case 'M': + if (last) { + return -1; + } + goto M; + case 'm': + if (last) { + return -1; + } + goto M; + case 'O': + if (last) { + return -1; + } + goto O; + case 'o': + if (last) { + return -1; + } + goto O; + case 'P': + if (last) { + return -1; + } + goto P; + case 'p': + if (last) { + return -1; + } + goto P; + case 'R': + if (last) { + return -1; + } + goto R; + case 'r': + if (last) { + return -1; + } + goto R; + case 'S': + if (last) { + return -1; + } + goto S; + case 's': + if (last) { + return -1; + } + goto S; + case 'T': + if (last) { + return -1; + } + goto T; + case 't': + if (last) { + return -1; + } + goto T; + case 'U': + if (last) { + return -1; + } + goto U; + case 'u': + if (last) { + return -1; + } + goto U; + case 'V': + if (last) { + return -1; + } + goto V; + case 'v': + if (last) { + return -1; + } + goto V; + case 'W': + if (last) { + return -1; + } + goto W; + case 'w': + if (last) { + return -1; + } + goto W; + case 'X': + if (last) { 
+ return -1; + } + goto X; + case 'x': + if (last) { + return -1; + } + goto X; + default: + return -1; + } + +A: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto AC; + case 'c': + if (last) { + return -1; + } + goto AC; + case 'G': + if (last) { + return -1; + } + goto AG; + case 'g': + if (last) { + return -1; + } + goto AG; + case 'L': + if (last) { + return -1; + } + goto AL; + case 'l': + if (last) { + return -1; + } + goto AL; + case 'U': + if (last) { + return -1; + } + goto AU; + case 'u': + if (last) { + return -1; + } + goto AU; + default: + return -1; + } + +AC: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto ACC; + case 'c': + if (last) { + return -1; + } + goto ACC; + default: + return -1; + } + +ACC: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCE; + case 'e': + if (last) { + return -1; + } + goto ACCE; + default: + return -1; + } + +ACCE: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto ACCEP; + case 'p': + if (last) { + return -1; + } + goto ACCEP; + case 'S': + if (last) { + return -1; + } + goto ACCES; + case 's': + if (last) { + return -1; + } + goto ACCES; + default: + return -1; + } + +ACCEP: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 0; + } + goto ACCEPT; + case 't': + if (last) { + return 0; + } + goto ACCEPT; + default: + return -1; + } + +ACCEPT: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCEPT_; + default: + return -1; + } + +ACCEPT_: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto ACCEPT_C; + case 'c': + if (last) { + return -1; + } + goto ACCEPT_C; + case 'E': + if (last) { + return -1; + } + goto ACCEPT_E; + case 'e': + if (last) { + return -1; + } + goto ACCEPT_E; + case 'L': + if (last) { + return -1; + } + goto ACCEPT_L; + case 'l': + if (last) { + return -1; + } + goto ACCEPT_L; + case 'R': + if (last) { + return -1; + } + goto ACCEPT_R; + case 'r': + if (last) { + return -1; + } + goto ACCEPT_R; + default: + return -1; + } + +ACCEPT_C: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto ACCEPT_CH; + case 'h': + if (last) { + return -1; + } + goto ACCEPT_CH; + default: + return -1; + } + +ACCEPT_CH: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCEPT_CHA; + case 'a': + if (last) { + return -1; + } + goto ACCEPT_CHA; + default: + return -1; + } + +ACCEPT_CHA: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCEPT_CHAR; + case 'r': + if (last) { + return -1; + } + goto ACCEPT_CHAR; + default: + return -1; + } + +ACCEPT_CHAR: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto ACCEPT_CHARS; + case 's': + if (last) { + return -1; + } + goto ACCEPT_CHARS; + default: + return -1; + } + +ACCEPT_CHARS: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCEPT_CHARSE; + case 'e': + if (last) { + return -1; + } + goto ACCEPT_CHARSE; + default: + return -1; + } + +ACCEPT_CHARSE: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 1; + } + goto ACCEPT_CHARSET; + case 't': + if (last) { + return 1; + } + goto ACCEPT_CHARSET; + default: + return -1; + } + +ACCEPT_E: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCEPT_EN; + case 'n': + if (last) { + return -1; + } + goto ACCEPT_EN; + default: + return -1; + } + +ACCEPT_EN: + NEXT_CHAR(); + 
switch (ch) { + case 'C': + if (last) { + return -1; + } + goto ACCEPT_ENC; + case 'c': + if (last) { + return -1; + } + goto ACCEPT_ENC; + default: + return -1; + } + +ACCEPT_ENC: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCEPT_ENCO; + case 'o': + if (last) { + return -1; + } + goto ACCEPT_ENCO; + default: + return -1; + } + +ACCEPT_ENCO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCEPT_ENCOD; + case 'd': + if (last) { + return -1; + } + goto ACCEPT_ENCOD; + default: + return -1; + } + +ACCEPT_ENCOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ACCEPT_ENCODI; + case 'i': + if (last) { + return -1; + } + goto ACCEPT_ENCODI; + default: + return -1; + } + +ACCEPT_ENCODI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCEPT_ENCODIN; + case 'n': + if (last) { + return -1; + } + goto ACCEPT_ENCODIN; + default: + return -1; + } + +ACCEPT_ENCODIN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return 2; + } + goto ACCEPT_ENCODING; + case 'g': + if (last) { + return 2; + } + goto ACCEPT_ENCODING; + default: + return -1; + } + +ACCEPT_L: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCEPT_LA; + case 'a': + if (last) { + return -1; + } + goto ACCEPT_LA; + default: + return -1; + } + +ACCEPT_LA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCEPT_LAN; + case 'n': + if (last) { + return -1; + } + goto ACCEPT_LAN; + default: + return -1; + } + +ACCEPT_LAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ACCEPT_LANG; + case 'g': + if (last) { + return -1; + } + goto ACCEPT_LANG; + default: + return -1; + } + +ACCEPT_LANG: + NEXT_CHAR(); + switch (ch) { + case 'U': + if (last) { + return -1; + } + goto ACCEPT_LANGU; + case 'u': + if (last) { + return -1; + } + goto ACCEPT_LANGU; + default: + return -1; + } + +ACCEPT_LANGU: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCEPT_LANGUA; + case 'a': + if (last) { + return -1; + } + goto ACCEPT_LANGUA; + default: + return -1; + } + +ACCEPT_LANGUA: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ACCEPT_LANGUAG; + case 'g': + if (last) { + return -1; + } + goto ACCEPT_LANGUAG; + default: + return -1; + } + +ACCEPT_LANGUAG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 3; + } + goto ACCEPT_LANGUAGE; + case 'e': + if (last) { + return 3; + } + goto ACCEPT_LANGUAGE; + default: + return -1; + } + +ACCEPT_R: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCEPT_RA; + case 'a': + if (last) { + return -1; + } + goto ACCEPT_RA; + default: + return -1; + } + +ACCEPT_RA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCEPT_RAN; + case 'n': + if (last) { + return -1; + } + goto ACCEPT_RAN; + default: + return -1; + } + +ACCEPT_RAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ACCEPT_RANG; + case 'g': + if (last) { + return -1; + } + goto ACCEPT_RANG; + default: + return -1; + } + +ACCEPT_RANG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCEPT_RANGE; + case 'e': + if (last) { + return -1; + } + goto ACCEPT_RANGE; + default: + return -1; + } + +ACCEPT_RANGE: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 4; + } + goto ACCEPT_RANGES; + case 's': + if (last) { + return 
4; + } + goto ACCEPT_RANGES; + default: + return -1; + } + +ACCES: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto ACCESS; + case 's': + if (last) { + return -1; + } + goto ACCESS; + default: + return -1; + } + +ACCESS: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCESS_; + default: + return -1; + } + +ACCESS_: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto ACCESS_C; + case 'c': + if (last) { + return -1; + } + goto ACCESS_C; + default: + return -1; + } + +ACCESS_C: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CO; + default: + return -1; + } + +ACCESS_CO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCESS_CON; + case 'n': + if (last) { + return -1; + } + goto ACCESS_CON; + default: + return -1; + } + +ACCESS_CON: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ACCESS_CONT; + case 't': + if (last) { + return -1; + } + goto ACCESS_CONT; + default: + return -1; + } + +ACCESS_CONT: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTR; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTR; + default: + return -1; + } + +ACCESS_CONTR: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTRO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTRO; + default: + return -1; + } + +ACCESS_CONTRO: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto ACCESS_CONTROL; + case 'l': + if (last) { + return -1; + } + goto ACCESS_CONTROL; + default: + return -1; + } + +ACCESS_CONTROL: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCESS_CONTROL_; + default: + return -1; + } + +ACCESS_CONTROL_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_A; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_A; + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_E; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_E; + case 'M': + if (last) { + return -1; + } + goto ACCESS_CONTROL_M; + case 'm': + if (last) { + return -1; + } + goto ACCESS_CONTROL_M; + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_R; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_R; + default: + return -1; + } + +ACCESS_CONTROL_A: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto ACCESS_CONTROL_AL; + case 'l': + if (last) { + return -1; + } + goto ACCESS_CONTROL_AL; + default: + return -1; + } + +ACCESS_CONTROL_AL: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALL; + case 'l': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALL; + default: + return -1; + } + +ACCESS_CONTROL_ALL: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLO; + default: + return -1; + } + +ACCESS_CONTROL_ALLO: + NEXT_CHAR(); + switch (ch) { + case 'W': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW; + case 'w': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto 
ACCESS_CONTROL_ALLOW_; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_C; + case 'c': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_C; + case 'H': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_H; + case 'h': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_H; + case 'M': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_M; + case 'm': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_M; + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_O; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_O; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_C: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CR; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CR; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CR: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CRE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CRE; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CRE: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CRED; + case 'd': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CRED; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CRED: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDE; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDEN; + case 'n': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDEN; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDEN: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENT; + case 't': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENT; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDENT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTI; + case 'i': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTI; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDENTI: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIA; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIA; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDENTIA: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIAL; + case 'l': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIAL; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_CREDENTIAL: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 5; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIALS; + case 's': + if (last) { + return 5; + } + goto ACCESS_CONTROL_ALLOW_CREDENTIALS; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_H: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HE; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_HE: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; 
+ } + goto ACCESS_CONTROL_ALLOW_HEA; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEA; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_HEA: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEAD; + case 'd': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEAD; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_HEAD: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEADE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEADE; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_HEADE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEADER; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_HEADER; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_HEADER: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 6; + } + goto ACCESS_CONTROL_ALLOW_HEADERS; + case 's': + if (last) { + return 6; + } + goto ACCESS_CONTROL_ALLOW_HEADERS; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_M: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ME; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ME; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_ME: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_MET; + case 't': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_MET; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_MET: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METH; + case 'h': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METH; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_METH: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METHO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METHO; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_METHO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METHOD; + case 'd': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_METHOD; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_METHOD: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 7; + } + goto ACCESS_CONTROL_ALLOW_METHODS; + case 's': + if (last) { + return 7; + } + goto ACCESS_CONTROL_ALLOW_METHODS; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_O: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_OR; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_OR; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_OR: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORI; + case 'i': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORI; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_ORI: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORIG; + case 'g': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORIG; + default: + return -1; + } + +ACCESS_CONTROL_ALLOW_ORIG: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORIGI; + case 'i': + if (last) { + return -1; + } + goto ACCESS_CONTROL_ALLOW_ORIGI; + default: + return -1; + } + 
+ACCESS_CONTROL_ALLOW_ORIGI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 8; + } + goto ACCESS_CONTROL_ALLOW_ORIGIN; + case 'n': + if (last) { + return 8; + } + goto ACCESS_CONTROL_ALLOW_ORIGIN; + default: + return -1; + } + +ACCESS_CONTROL_E: + NEXT_CHAR(); + switch (ch) { + case 'X': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EX; + case 'x': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EX; + default: + return -1; + } + +ACCESS_CONTROL_EX: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXP; + case 'p': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXP; + default: + return -1; + } + +ACCESS_CONTROL_EXP: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPO; + default: + return -1; + } + +ACCESS_CONTROL_EXPO: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOS; + case 's': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOS; + default: + return -1; + } + +ACCESS_CONTROL_EXPOS: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_H; + case 'h': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_H; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_H: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HE; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_HE: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEA; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEA; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_HEA: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEAD; + case 'd': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEAD; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_HEAD: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEADE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEADE; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_HEADE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEADER; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_EXPOSE_HEADER; + default: + return -1; + } + +ACCESS_CONTROL_EXPOSE_HEADER: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 9; + } + goto ACCESS_CONTROL_EXPOSE_HEADERS; + case 's': + if (last) { + return 9; + } + goto ACCESS_CONTROL_EXPOSE_HEADERS; + default: + return -1; + } + +ACCESS_CONTROL_M: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MA; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MA; + default: + return -1; + } + +ACCESS_CONTROL_MA: + NEXT_CHAR(); + switch (ch) { + case 'X': + if (last) { + return -1; + } + 
goto ACCESS_CONTROL_MAX; + case 'x': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX; + default: + return -1; + } + +ACCESS_CONTROL_MAX: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX_; + default: + return -1; + } + +ACCESS_CONTROL_MAX_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX_A; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX_A; + default: + return -1; + } + +ACCESS_CONTROL_MAX_A: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX_AG; + case 'g': + if (last) { + return -1; + } + goto ACCESS_CONTROL_MAX_AG; + default: + return -1; + } + +ACCESS_CONTROL_MAX_AG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 10; + } + goto ACCESS_CONTROL_MAX_AGE; + case 'e': + if (last) { + return 10; + } + goto ACCESS_CONTROL_MAX_AGE; + default: + return -1; + } + +ACCESS_CONTROL_R: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_RE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_RE; + default: + return -1; + } + +ACCESS_CONTROL_RE: + NEXT_CHAR(); + switch (ch) { + case 'Q': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQ; + case 'q': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQ; + default: + return -1; + } + +ACCESS_CONTROL_REQ: + NEXT_CHAR(); + switch (ch) { + case 'U': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQU; + case 'u': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQU; + default: + return -1; + } + +ACCESS_CONTROL_REQU: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUE; + default: + return -1; + } + +ACCESS_CONTROL_REQUE: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUES; + case 's': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUES; + default: + return -1; + } + +ACCESS_CONTROL_REQUES: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST; + case 't': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_H; + case 'h': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_H; + case 'M': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_M; + case 'm': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_M; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_H: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HE; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_HE: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEA; + case 'a': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEA; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_HEA: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEAD; + case 'd': + if (last) { + return -1; + } + goto 
ACCESS_CONTROL_REQUEST_HEAD; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_HEAD: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEADE; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEADE; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_HEADE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEADER; + case 'r': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_HEADER; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_HEADER: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 11; + } + goto ACCESS_CONTROL_REQUEST_HEADERS; + case 's': + if (last) { + return 11; + } + goto ACCESS_CONTROL_REQUEST_HEADERS; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_M: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_ME; + case 'e': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_ME; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_ME: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_MET; + case 't': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_MET; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_MET: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_METH; + case 'h': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_METH; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_METH: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_METHO; + case 'o': + if (last) { + return -1; + } + goto ACCESS_CONTROL_REQUEST_METHO; + default: + return -1; + } + +ACCESS_CONTROL_REQUEST_METHO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return 12; + } + goto ACCESS_CONTROL_REQUEST_METHOD; + case 'd': + if (last) { + return 12; + } + goto ACCESS_CONTROL_REQUEST_METHOD; + default: + return -1; + } + +AG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 13; + } + goto AGE; + case 'e': + if (last) { + return 13; + } + goto AGE; + default: + return -1; + } + +AL: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto ALL; + case 'l': + if (last) { + return -1; + } + goto ALL; + default: + return -1; + } + +ALL: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto ALLO; + case 'o': + if (last) { + return -1; + } + goto ALLO; + default: + return -1; + } + +ALLO: + NEXT_CHAR(); + switch (ch) { + case 'W': + if (last) { + return 14; + } + goto ALLOW; + case 'w': + if (last) { + return 14; + } + goto ALLOW; + default: + return -1; + } + +AU: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto AUT; + case 't': + if (last) { + return -1; + } + goto AUT; + default: + return -1; + } + +AUT: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto AUTH; + case 'h': + if (last) { + return -1; + } + goto AUTH; + default: + return -1; + } + +AUTH: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto AUTHO; + case 'o': + if (last) { + return -1; + } + goto AUTHO; + default: + return -1; + } + +AUTHO: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto AUTHOR; + case 'r': + if (last) { + return -1; + } + goto AUTHOR; + default: + return -1; + } + +AUTHOR: + NEXT_CHAR(); + switch (ch) { + case 'I': + if 
(last) { + return -1; + } + goto AUTHORI; + case 'i': + if (last) { + return -1; + } + goto AUTHORI; + default: + return -1; + } + +AUTHORI: + NEXT_CHAR(); + switch (ch) { + case 'Z': + if (last) { + return -1; + } + goto AUTHORIZ; + case 'z': + if (last) { + return -1; + } + goto AUTHORIZ; + default: + return -1; + } + +AUTHORIZ: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto AUTHORIZA; + case 'a': + if (last) { + return -1; + } + goto AUTHORIZA; + default: + return -1; + } + +AUTHORIZA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto AUTHORIZAT; + case 't': + if (last) { + return -1; + } + goto AUTHORIZAT; + default: + return -1; + } + +AUTHORIZAT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto AUTHORIZATI; + case 'i': + if (last) { + return -1; + } + goto AUTHORIZATI; + default: + return -1; + } + +AUTHORIZATI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto AUTHORIZATIO; + case 'o': + if (last) { + return -1; + } + goto AUTHORIZATIO; + default: + return -1; + } + +AUTHORIZATIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 15; + } + goto AUTHORIZATION; + case 'n': + if (last) { + return 15; + } + goto AUTHORIZATION; + default: + return -1; + } + +C: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CA; + case 'a': + if (last) { + return -1; + } + goto CA; + case 'O': + if (last) { + return -1; + } + goto CO; + case 'o': + if (last) { + return -1; + } + goto CO; + default: + return -1; + } + +CA: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CAC; + case 'c': + if (last) { + return -1; + } + goto CAC; + default: + return -1; + } + +CAC: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto CACH; + case 'h': + if (last) { + return -1; + } + goto CACH; + default: + return -1; + } + +CACH: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto CACHE; + case 'e': + if (last) { + return -1; + } + goto CACHE; + default: + return -1; + } + +CACHE: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto CACHE_; + default: + return -1; + } + +CACHE_: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CACHE_C; + case 'c': + if (last) { + return -1; + } + goto CACHE_C; + default: + return -1; + } + +CACHE_C: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CACHE_CO; + case 'o': + if (last) { + return -1; + } + goto CACHE_CO; + default: + return -1; + } + +CACHE_CO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CACHE_CON; + case 'n': + if (last) { + return -1; + } + goto CACHE_CON; + default: + return -1; + } + +CACHE_CON: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CACHE_CONT; + case 't': + if (last) { + return -1; + } + goto CACHE_CONT; + default: + return -1; + } + +CACHE_CONT: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto CACHE_CONTR; + case 'r': + if (last) { + return -1; + } + goto CACHE_CONTR; + default: + return -1; + } + +CACHE_CONTR: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CACHE_CONTRO; + case 'o': + if (last) { + return -1; + } + goto CACHE_CONTRO; + default: + return -1; + } + +CACHE_CONTRO: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return 16; + } + goto CACHE_CONTROL; + 
case 'l': + if (last) { + return 16; + } + goto CACHE_CONTROL; + default: + return -1; + } + +CO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CON; + case 'n': + if (last) { + return -1; + } + goto CON; + case 'O': + if (last) { + return -1; + } + goto COO; + case 'o': + if (last) { + return -1; + } + goto COO; + default: + return -1; + } + +CON: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONN; + case 'n': + if (last) { + return -1; + } + goto CONN; + case 'T': + if (last) { + return -1; + } + goto CONT; + case 't': + if (last) { + return -1; + } + goto CONT; + default: + return -1; + } + +CONN: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto CONNE; + case 'e': + if (last) { + return -1; + } + goto CONNE; + default: + return -1; + } + +CONNE: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CONNEC; + case 'c': + if (last) { + return -1; + } + goto CONNEC; + default: + return -1; + } + +CONNEC: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CONNECT; + case 't': + if (last) { + return -1; + } + goto CONNECT; + default: + return -1; + } + +CONNECT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONNECTI; + case 'i': + if (last) { + return -1; + } + goto CONNECTI; + default: + return -1; + } + +CONNECTI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CONNECTIO; + case 'o': + if (last) { + return -1; + } + goto CONNECTIO; + default: + return -1; + } + +CONNECTIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 17; + } + goto CONNECTION; + case 'n': + if (last) { + return 17; + } + goto CONNECTION; + default: + return -1; + } + +CONT: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto CONTE; + case 'e': + if (last) { + return -1; + } + goto CONTE; + default: + return -1; + } + +CONTE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTEN; + case 'n': + if (last) { + return -1; + } + goto CONTEN; + default: + return -1; + } + +CONTEN: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CONTENT; + case 't': + if (last) { + return -1; + } + goto CONTENT; + default: + return -1; + } + +CONTENT: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto CONTENT_; + default: + return -1; + } + +CONTENT_: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto CONTENT_D; + case 'd': + if (last) { + return -1; + } + goto CONTENT_D; + case 'E': + if (last) { + return -1; + } + goto CONTENT_E; + case 'e': + if (last) { + return -1; + } + goto CONTENT_E; + case 'L': + if (last) { + return -1; + } + goto CONTENT_L; + case 'l': + if (last) { + return -1; + } + goto CONTENT_L; + case 'M': + if (last) { + return -1; + } + goto CONTENT_M; + case 'm': + if (last) { + return -1; + } + goto CONTENT_M; + case 'R': + if (last) { + return -1; + } + goto CONTENT_R; + case 'r': + if (last) { + return -1; + } + goto CONTENT_R; + case 'T': + if (last) { + return -1; + } + goto CONTENT_T; + case 't': + if (last) { + return -1; + } + goto CONTENT_T; + default: + return -1; + } + +CONTENT_D: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_DI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_DI; + default: + return -1; + } + +CONTENT_DI: + NEXT_CHAR(); + switch (ch) { + case 'S': + if 
(last) { + return -1; + } + goto CONTENT_DIS; + case 's': + if (last) { + return -1; + } + goto CONTENT_DIS; + default: + return -1; + } + +CONTENT_DIS: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto CONTENT_DISP; + case 'p': + if (last) { + return -1; + } + goto CONTENT_DISP; + default: + return -1; + } + +CONTENT_DISP: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CONTENT_DISPO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_DISPO; + default: + return -1; + } + +CONTENT_DISPO: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto CONTENT_DISPOS; + case 's': + if (last) { + return -1; + } + goto CONTENT_DISPOS; + default: + return -1; + } + +CONTENT_DISPOS: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_DISPOSI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_DISPOSI; + default: + return -1; + } + +CONTENT_DISPOSI: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CONTENT_DISPOSIT; + case 't': + if (last) { + return -1; + } + goto CONTENT_DISPOSIT; + default: + return -1; + } + +CONTENT_DISPOSIT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_DISPOSITI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_DISPOSITI; + default: + return -1; + } + +CONTENT_DISPOSITI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CONTENT_DISPOSITIO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_DISPOSITIO; + default: + return -1; + } + +CONTENT_DISPOSITIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 18; + } + goto CONTENT_DISPOSITION; + case 'n': + if (last) { + return 18; + } + goto CONTENT_DISPOSITION; + default: + return -1; + } + +CONTENT_E: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_EN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_EN; + default: + return -1; + } + +CONTENT_EN: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CONTENT_ENC; + case 'c': + if (last) { + return -1; + } + goto CONTENT_ENC; + default: + return -1; + } + +CONTENT_ENC: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CONTENT_ENCO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_ENCO; + default: + return -1; + } + +CONTENT_ENCO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto CONTENT_ENCOD; + case 'd': + if (last) { + return -1; + } + goto CONTENT_ENCOD; + default: + return -1; + } + +CONTENT_ENCOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_ENCODI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_ENCODI; + default: + return -1; + } + +CONTENT_ENCODI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_ENCODIN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_ENCODIN; + default: + return -1; + } + +CONTENT_ENCODIN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return 19; + } + goto CONTENT_ENCODING; + case 'g': + if (last) { + return 19; + } + goto CONTENT_ENCODING; + default: + return -1; + } + +CONTENT_L: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CONTENT_LA; + case 'a': + if (last) { + return -1; + } + goto CONTENT_LA; + case 'E': + if (last) { + return -1; + } + goto CONTENT_LE; + case 'e': + if (last) { + return 
-1; + } + goto CONTENT_LE; + case 'O': + if (last) { + return -1; + } + goto CONTENT_LO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_LO; + default: + return -1; + } + +CONTENT_LA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_LAN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_LAN; + default: + return -1; + } + +CONTENT_LAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto CONTENT_LANG; + case 'g': + if (last) { + return -1; + } + goto CONTENT_LANG; + default: + return -1; + } + +CONTENT_LANG: + NEXT_CHAR(); + switch (ch) { + case 'U': + if (last) { + return -1; + } + goto CONTENT_LANGU; + case 'u': + if (last) { + return -1; + } + goto CONTENT_LANGU; + default: + return -1; + } + +CONTENT_LANGU: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CONTENT_LANGUA; + case 'a': + if (last) { + return -1; + } + goto CONTENT_LANGUA; + default: + return -1; + } + +CONTENT_LANGUA: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto CONTENT_LANGUAG; + case 'g': + if (last) { + return -1; + } + goto CONTENT_LANGUAG; + default: + return -1; + } + +CONTENT_LANGUAG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 20; + } + goto CONTENT_LANGUAGE; + case 'e': + if (last) { + return 20; + } + goto CONTENT_LANGUAGE; + default: + return -1; + } + +CONTENT_LE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_LEN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_LEN; + default: + return -1; + } + +CONTENT_LEN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto CONTENT_LENG; + case 'g': + if (last) { + return -1; + } + goto CONTENT_LENG; + default: + return -1; + } + +CONTENT_LENG: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CONTENT_LENGT; + case 't': + if (last) { + return -1; + } + goto CONTENT_LENGT; + default: + return -1; + } + +CONTENT_LENGT: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return 21; + } + goto CONTENT_LENGTH; + case 'h': + if (last) { + return 21; + } + goto CONTENT_LENGTH; + default: + return -1; + } + +CONTENT_LO: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CONTENT_LOC; + case 'c': + if (last) { + return -1; + } + goto CONTENT_LOC; + default: + return -1; + } + +CONTENT_LOC: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CONTENT_LOCA; + case 'a': + if (last) { + return -1; + } + goto CONTENT_LOCA; + default: + return -1; + } + +CONTENT_LOCA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto CONTENT_LOCAT; + case 't': + if (last) { + return -1; + } + goto CONTENT_LOCAT; + default: + return -1; + } + +CONTENT_LOCAT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_LOCATI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_LOCATI; + default: + return -1; + } + +CONTENT_LOCATI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto CONTENT_LOCATIO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_LOCATIO; + default: + return -1; + } + +CONTENT_LOCATIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 22; + } + goto CONTENT_LOCATION; + case 'n': + if (last) { + return 22; + } + goto CONTENT_LOCATION; + default: + return -1; + } + +CONTENT_M: + NEXT_CHAR(); + switch (ch) { + case 'D': + if 
(last) { + return -1; + } + goto CONTENT_MD; + case 'd': + if (last) { + return -1; + } + goto CONTENT_MD; + default: + return -1; + } + +CONTENT_MD: + NEXT_CHAR(); + switch (ch) { + case '5': + if (last) { + return 23; + } + goto CONTENT_MD5; + default: + return -1; + } + +CONTENT_R: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CONTENT_RA; + case 'a': + if (last) { + return -1; + } + goto CONTENT_RA; + default: + return -1; + } + +CONTENT_RA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_RAN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_RAN; + default: + return -1; + } + +CONTENT_RAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto CONTENT_RANG; + case 'g': + if (last) { + return -1; + } + goto CONTENT_RANG; + default: + return -1; + } + +CONTENT_RANG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 24; + } + goto CONTENT_RANGE; + case 'e': + if (last) { + return 24; + } + goto CONTENT_RANGE; + default: + return -1; + } + +CONTENT_T: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto CONTENT_TR; + case 'r': + if (last) { + return -1; + } + goto CONTENT_TR; + case 'Y': + if (last) { + return -1; + } + goto CONTENT_TY; + case 'y': + if (last) { + return -1; + } + goto CONTENT_TY; + default: + return -1; + } + +CONTENT_TR: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto CONTENT_TRA; + case 'a': + if (last) { + return -1; + } + goto CONTENT_TRA; + default: + return -1; + } + +CONTENT_TRA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_TRAN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_TRAN; + default: + return -1; + } + +CONTENT_TRAN: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto CONTENT_TRANS; + case 's': + if (last) { + return -1; + } + goto CONTENT_TRANS; + default: + return -1; + } + +CONTENT_TRANS: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto CONTENT_TRANSF; + case 'f': + if (last) { + return -1; + } + goto CONTENT_TRANSF; + default: + return -1; + } + +CONTENT_TRANSF: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto CONTENT_TRANSFE; + case 'e': + if (last) { + return -1; + } + goto CONTENT_TRANSFE; + default: + return -1; + } + +CONTENT_TRANSFE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto CONTENT_TRANSFER; + case 'r': + if (last) { + return -1; + } + goto CONTENT_TRANSFER; + default: + return -1; + } + +CONTENT_TRANSFER: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_; + default: + return -1; + } + +CONTENT_TRANSFER_: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_E; + case 'e': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_E; + default: + return -1; + } + +CONTENT_TRANSFER_E: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_EN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_EN; + default: + return -1; + } + +CONTENT_TRANSFER_EN: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENC; + case 'c': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENC; + default: + return -1; + } + +CONTENT_TRANSFER_ENC: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + 
return -1; + } + goto CONTENT_TRANSFER_ENCO; + case 'o': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCO; + default: + return -1; + } + +CONTENT_TRANSFER_ENCO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCOD; + case 'd': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCOD; + default: + return -1; + } + +CONTENT_TRANSFER_ENCOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCODI; + case 'i': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCODI; + default: + return -1; + } + +CONTENT_TRANSFER_ENCODI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCODIN; + case 'n': + if (last) { + return -1; + } + goto CONTENT_TRANSFER_ENCODIN; + default: + return -1; + } + +CONTENT_TRANSFER_ENCODIN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return 25; + } + goto CONTENT_TRANSFER_ENCODING; + case 'g': + if (last) { + return 25; + } + goto CONTENT_TRANSFER_ENCODING; + default: + return -1; + } + +CONTENT_TY: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto CONTENT_TYP; + case 'p': + if (last) { + return -1; + } + goto CONTENT_TYP; + default: + return -1; + } + +CONTENT_TYP: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 26; + } + goto CONTENT_TYPE; + case 'e': + if (last) { + return 26; + } + goto CONTENT_TYPE; + default: + return -1; + } + +COO: + NEXT_CHAR(); + switch (ch) { + case 'K': + if (last) { + return -1; + } + goto COOK; + case 'k': + if (last) { + return -1; + } + goto COOK; + default: + return -1; + } + +COOK: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto COOKI; + case 'i': + if (last) { + return -1; + } + goto COOKI; + default: + return -1; + } + +COOKI: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 27; + } + goto COOKIE; + case 'e': + if (last) { + return 27; + } + goto COOKIE; + default: + return -1; + } + +D: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto DA; + case 'a': + if (last) { + return -1; + } + goto DA; + case 'E': + if (last) { + return -1; + } + goto DE; + case 'e': + if (last) { + return -1; + } + goto DE; + case 'I': + if (last) { + return -1; + } + goto DI; + case 'i': + if (last) { + return -1; + } + goto DI; + default: + return -1; + } + +DA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto DAT; + case 't': + if (last) { + return -1; + } + goto DAT; + default: + return -1; + } + +DAT: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 28; + } + goto DATE; + case 'e': + if (last) { + return 28; + } + goto DATE; + default: + return -1; + } + +DE: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto DES; + case 's': + if (last) { + return -1; + } + goto DES; + default: + return -1; + } + +DES: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto DEST; + case 't': + if (last) { + return -1; + } + goto DEST; + default: + return -1; + } + +DEST: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto DESTI; + case 'i': + if (last) { + return -1; + } + goto DESTI; + default: + return -1; + } + +DESTI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto DESTIN; + case 'n': + if (last) { + return -1; + } + goto DESTIN; + default: + return -1; + } + +DESTIN: + NEXT_CHAR(); + switch (ch) { + 
case 'A': + if (last) { + return -1; + } + goto DESTINA; + case 'a': + if (last) { + return -1; + } + goto DESTINA; + default: + return -1; + } + +DESTINA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto DESTINAT; + case 't': + if (last) { + return -1; + } + goto DESTINAT; + default: + return -1; + } + +DESTINAT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto DESTINATI; + case 'i': + if (last) { + return -1; + } + goto DESTINATI; + default: + return -1; + } + +DESTINATI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto DESTINATIO; + case 'o': + if (last) { + return -1; + } + goto DESTINATIO; + default: + return -1; + } + +DESTINATIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 29; + } + goto DESTINATION; + case 'n': + if (last) { + return 29; + } + goto DESTINATION; + default: + return -1; + } + +DI: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto DIG; + case 'g': + if (last) { + return -1; + } + goto DIG; + default: + return -1; + } + +DIG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto DIGE; + case 'e': + if (last) { + return -1; + } + goto DIGE; + default: + return -1; + } + +DIGE: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto DIGES; + case 's': + if (last) { + return -1; + } + goto DIGES; + default: + return -1; + } + +DIGES: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 30; + } + goto DIGEST; + case 't': + if (last) { + return 30; + } + goto DIGEST; + default: + return -1; + } + +E: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto ET; + case 't': + if (last) { + return -1; + } + goto ET; + case 'X': + if (last) { + return -1; + } + goto EX; + case 'x': + if (last) { + return -1; + } + goto EX; + default: + return -1; + } + +ET: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto ETA; + case 'a': + if (last) { + return -1; + } + goto ETA; + default: + return -1; + } + +ETA: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return 31; + } + goto ETAG; + case 'g': + if (last) { + return 31; + } + goto ETAG; + default: + return -1; + } + +EX: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto EXP; + case 'p': + if (last) { + return -1; + } + goto EXP; + default: + return -1; + } + +EXP: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto EXPE; + case 'e': + if (last) { + return -1; + } + goto EXPE; + case 'I': + if (last) { + return -1; + } + goto EXPI; + case 'i': + if (last) { + return -1; + } + goto EXPI; + default: + return -1; + } + +EXPE: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto EXPEC; + case 'c': + if (last) { + return -1; + } + goto EXPEC; + default: + return -1; + } + +EXPEC: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 32; + } + goto EXPECT; + case 't': + if (last) { + return 32; + } + goto EXPECT; + default: + return -1; + } + +EXPI: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto EXPIR; + case 'r': + if (last) { + return -1; + } + goto EXPIR; + default: + return -1; + } + +EXPIR: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto EXPIRE; + case 'e': + if (last) { + return -1; + } + goto EXPIRE; + default: + return -1; + } + +EXPIRE: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + 
return 33; + } + goto EXPIRES; + case 's': + if (last) { + return 33; + } + goto EXPIRES; + default: + return -1; + } + +F: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto FO; + case 'o': + if (last) { + return -1; + } + goto FO; + case 'R': + if (last) { + return -1; + } + goto FR; + case 'r': + if (last) { + return -1; + } + goto FR; + default: + return -1; + } + +FO: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto FOR; + case 'r': + if (last) { + return -1; + } + goto FOR; + default: + return -1; + } + +FOR: + NEXT_CHAR(); + switch (ch) { + case 'W': + if (last) { + return -1; + } + goto FORW; + case 'w': + if (last) { + return -1; + } + goto FORW; + default: + return -1; + } + +FORW: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto FORWA; + case 'a': + if (last) { + return -1; + } + goto FORWA; + default: + return -1; + } + +FORWA: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto FORWAR; + case 'r': + if (last) { + return -1; + } + goto FORWAR; + default: + return -1; + } + +FORWAR: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto FORWARD; + case 'd': + if (last) { + return -1; + } + goto FORWARD; + default: + return -1; + } + +FORWARD: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto FORWARDE; + case 'e': + if (last) { + return -1; + } + goto FORWARDE; + default: + return -1; + } + +FORWARDE: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return 34; + } + goto FORWARDED; + case 'd': + if (last) { + return 34; + } + goto FORWARDED; + default: + return -1; + } + +FR: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto FRO; + case 'o': + if (last) { + return -1; + } + goto FRO; + default: + return -1; + } + +FRO: + NEXT_CHAR(); + switch (ch) { + case 'M': + if (last) { + return 35; + } + goto FROM; + case 'm': + if (last) { + return 35; + } + goto FROM; + default: + return -1; + } + +H: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto HO; + case 'o': + if (last) { + return -1; + } + goto HO; + default: + return -1; + } + +HO: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto HOS; + case 's': + if (last) { + return -1; + } + goto HOS; + default: + return -1; + } + +HOS: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 36; + } + goto HOST; + case 't': + if (last) { + return 36; + } + goto HOST; + default: + return -1; + } + +I: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto IF; + case 'f': + if (last) { + return -1; + } + goto IF; + default: + return -1; + } + +IF: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto IF_; + default: + return -1; + } + +IF_: + NEXT_CHAR(); + switch (ch) { + case 'M': + if (last) { + return -1; + } + goto IF_M; + case 'm': + if (last) { + return -1; + } + goto IF_M; + case 'N': + if (last) { + return -1; + } + goto IF_N; + case 'n': + if (last) { + return -1; + } + goto IF_N; + case 'R': + if (last) { + return -1; + } + goto IF_R; + case 'r': + if (last) { + return -1; + } + goto IF_R; + case 'U': + if (last) { + return -1; + } + goto IF_U; + case 'u': + if (last) { + return -1; + } + goto IF_U; + default: + return -1; + } + +IF_M: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto IF_MA; + case 'a': + if (last) { + return -1; + } + goto IF_MA; + case 'O': + 
if (last) { + return -1; + } + goto IF_MO; + case 'o': + if (last) { + return -1; + } + goto IF_MO; + default: + return -1; + } + +IF_MA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto IF_MAT; + case 't': + if (last) { + return -1; + } + goto IF_MAT; + default: + return -1; + } + +IF_MAT: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto IF_MATC; + case 'c': + if (last) { + return -1; + } + goto IF_MATC; + default: + return -1; + } + +IF_MATC: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return 37; + } + goto IF_MATCH; + case 'h': + if (last) { + return 37; + } + goto IF_MATCH; + default: + return -1; + } + +IF_MO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto IF_MOD; + case 'd': + if (last) { + return -1; + } + goto IF_MOD; + default: + return -1; + } + +IF_MOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_MODI; + case 'i': + if (last) { + return -1; + } + goto IF_MODI; + default: + return -1; + } + +IF_MODI: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto IF_MODIF; + case 'f': + if (last) { + return -1; + } + goto IF_MODIF; + default: + return -1; + } + +IF_MODIF: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_MODIFI; + case 'i': + if (last) { + return -1; + } + goto IF_MODIFI; + default: + return -1; + } + +IF_MODIFI: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto IF_MODIFIE; + case 'e': + if (last) { + return -1; + } + goto IF_MODIFIE; + default: + return -1; + } + +IF_MODIFIE: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto IF_MODIFIED; + case 'd': + if (last) { + return -1; + } + goto IF_MODIFIED; + default: + return -1; + } + +IF_MODIFIED: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto IF_MODIFIED_; + default: + return -1; + } + +IF_MODIFIED_: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto IF_MODIFIED_S; + case 's': + if (last) { + return -1; + } + goto IF_MODIFIED_S; + default: + return -1; + } + +IF_MODIFIED_S: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_MODIFIED_SI; + case 'i': + if (last) { + return -1; + } + goto IF_MODIFIED_SI; + default: + return -1; + } + +IF_MODIFIED_SI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto IF_MODIFIED_SIN; + case 'n': + if (last) { + return -1; + } + goto IF_MODIFIED_SIN; + default: + return -1; + } + +IF_MODIFIED_SIN: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto IF_MODIFIED_SINC; + case 'c': + if (last) { + return -1; + } + goto IF_MODIFIED_SINC; + default: + return -1; + } + +IF_MODIFIED_SINC: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 38; + } + goto IF_MODIFIED_SINCE; + case 'e': + if (last) { + return 38; + } + goto IF_MODIFIED_SINCE; + default: + return -1; + } + +IF_N: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto IF_NO; + case 'o': + if (last) { + return -1; + } + goto IF_NO; + default: + return -1; + } + +IF_NO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto IF_NON; + case 'n': + if (last) { + return -1; + } + goto IF_NON; + default: + return -1; + } + +IF_NON: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto IF_NONE; + case 'e': + if (last) { + return -1; + 
} + goto IF_NONE; + default: + return -1; + } + +IF_NONE: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto IF_NONE_; + default: + return -1; + } + +IF_NONE_: + NEXT_CHAR(); + switch (ch) { + case 'M': + if (last) { + return -1; + } + goto IF_NONE_M; + case 'm': + if (last) { + return -1; + } + goto IF_NONE_M; + default: + return -1; + } + +IF_NONE_M: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto IF_NONE_MA; + case 'a': + if (last) { + return -1; + } + goto IF_NONE_MA; + default: + return -1; + } + +IF_NONE_MA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto IF_NONE_MAT; + case 't': + if (last) { + return -1; + } + goto IF_NONE_MAT; + default: + return -1; + } + +IF_NONE_MAT: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto IF_NONE_MATC; + case 'c': + if (last) { + return -1; + } + goto IF_NONE_MATC; + default: + return -1; + } + +IF_NONE_MATC: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return 39; + } + goto IF_NONE_MATCH; + case 'h': + if (last) { + return 39; + } + goto IF_NONE_MATCH; + default: + return -1; + } + +IF_R: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto IF_RA; + case 'a': + if (last) { + return -1; + } + goto IF_RA; + default: + return -1; + } + +IF_RA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto IF_RAN; + case 'n': + if (last) { + return -1; + } + goto IF_RAN; + default: + return -1; + } + +IF_RAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto IF_RANG; + case 'g': + if (last) { + return -1; + } + goto IF_RANG; + default: + return -1; + } + +IF_RANG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 40; + } + goto IF_RANGE; + case 'e': + if (last) { + return 40; + } + goto IF_RANGE; + default: + return -1; + } + +IF_U: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto IF_UN; + case 'n': + if (last) { + return -1; + } + goto IF_UN; + default: + return -1; + } + +IF_UN: + NEXT_CHAR(); + switch (ch) { + case 'M': + if (last) { + return -1; + } + goto IF_UNM; + case 'm': + if (last) { + return -1; + } + goto IF_UNM; + default: + return -1; + } + +IF_UNM: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto IF_UNMO; + case 'o': + if (last) { + return -1; + } + goto IF_UNMO; + default: + return -1; + } + +IF_UNMO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto IF_UNMOD; + case 'd': + if (last) { + return -1; + } + goto IF_UNMOD; + default: + return -1; + } + +IF_UNMOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_UNMODI; + case 'i': + if (last) { + return -1; + } + goto IF_UNMODI; + default: + return -1; + } + +IF_UNMODI: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto IF_UNMODIF; + case 'f': + if (last) { + return -1; + } + goto IF_UNMODIF; + default: + return -1; + } + +IF_UNMODIF: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_UNMODIFI; + case 'i': + if (last) { + return -1; + } + goto IF_UNMODIFI; + default: + return -1; + } + +IF_UNMODIFI: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto IF_UNMODIFIE; + case 'e': + if (last) { + return -1; + } + goto IF_UNMODIFIE; + default: + return -1; + } + +IF_UNMODIFIE: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } 
+ goto IF_UNMODIFIED; + case 'd': + if (last) { + return -1; + } + goto IF_UNMODIFIED; + default: + return -1; + } + +IF_UNMODIFIED: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto IF_UNMODIFIED_; + default: + return -1; + } + +IF_UNMODIFIED_: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto IF_UNMODIFIED_S; + case 's': + if (last) { + return -1; + } + goto IF_UNMODIFIED_S; + default: + return -1; + } + +IF_UNMODIFIED_S: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SI; + case 'i': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SI; + default: + return -1; + } + +IF_UNMODIFIED_SI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SIN; + case 'n': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SIN; + default: + return -1; + } + +IF_UNMODIFIED_SIN: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SINC; + case 'c': + if (last) { + return -1; + } + goto IF_UNMODIFIED_SINC; + default: + return -1; + } + +IF_UNMODIFIED_SINC: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 41; + } + goto IF_UNMODIFIED_SINCE; + case 'e': + if (last) { + return 41; + } + goto IF_UNMODIFIED_SINCE; + default: + return -1; + } + +K: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto KE; + case 'e': + if (last) { + return -1; + } + goto KE; + default: + return -1; + } + +KE: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto KEE; + case 'e': + if (last) { + return -1; + } + goto KEE; + default: + return -1; + } + +KEE: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto KEEP; + case 'p': + if (last) { + return -1; + } + goto KEEP; + default: + return -1; + } + +KEEP: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto KEEP_; + default: + return -1; + } + +KEEP_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto KEEP_A; + case 'a': + if (last) { + return -1; + } + goto KEEP_A; + default: + return -1; + } + +KEEP_A: + NEXT_CHAR(); + switch (ch) { + case 'L': + if (last) { + return -1; + } + goto KEEP_AL; + case 'l': + if (last) { + return -1; + } + goto KEEP_AL; + default: + return -1; + } + +KEEP_AL: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto KEEP_ALI; + case 'i': + if (last) { + return -1; + } + goto KEEP_ALI; + default: + return -1; + } + +KEEP_ALI: + NEXT_CHAR(); + switch (ch) { + case 'V': + if (last) { + return -1; + } + goto KEEP_ALIV; + case 'v': + if (last) { + return -1; + } + goto KEEP_ALIV; + default: + return -1; + } + +KEEP_ALIV: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 42; + } + goto KEEP_ALIVE; + case 'e': + if (last) { + return 42; + } + goto KEEP_ALIVE; + default: + return -1; + } + +L: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto LA; + case 'a': + if (last) { + return -1; + } + goto LA; + case 'I': + if (last) { + return -1; + } + goto LI; + case 'i': + if (last) { + return -1; + } + goto LI; + case 'O': + if (last) { + return -1; + } + goto LO; + case 'o': + if (last) { + return -1; + } + goto LO; + default: + return -1; + } + +LA: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto LAS; + case 's': + if (last) { + return -1; + } + goto LAS; + default: + return -1; + } + +LAS: + NEXT_CHAR(); + switch 
(ch) { + case 'T': + if (last) { + return -1; + } + goto LAST; + case 't': + if (last) { + return -1; + } + goto LAST; + default: + return -1; + } + +LAST: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto LAST_; + default: + return -1; + } + +LAST_: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto LAST_E; + case 'e': + if (last) { + return -1; + } + goto LAST_E; + case 'M': + if (last) { + return -1; + } + goto LAST_M; + case 'm': + if (last) { + return -1; + } + goto LAST_M; + default: + return -1; + } + +LAST_E: + NEXT_CHAR(); + switch (ch) { + case 'V': + if (last) { + return -1; + } + goto LAST_EV; + case 'v': + if (last) { + return -1; + } + goto LAST_EV; + default: + return -1; + } + +LAST_EV: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto LAST_EVE; + case 'e': + if (last) { + return -1; + } + goto LAST_EVE; + default: + return -1; + } + +LAST_EVE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto LAST_EVEN; + case 'n': + if (last) { + return -1; + } + goto LAST_EVEN; + default: + return -1; + } + +LAST_EVEN: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto LAST_EVENT; + case 't': + if (last) { + return -1; + } + goto LAST_EVENT; + default: + return -1; + } + +LAST_EVENT: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto LAST_EVENT_; + default: + return -1; + } + +LAST_EVENT_: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto LAST_EVENT_I; + case 'i': + if (last) { + return -1; + } + goto LAST_EVENT_I; + default: + return -1; + } + +LAST_EVENT_I: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return 43; + } + goto LAST_EVENT_ID; + case 'd': + if (last) { + return 43; + } + goto LAST_EVENT_ID; + default: + return -1; + } + +LAST_M: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto LAST_MO; + case 'o': + if (last) { + return -1; + } + goto LAST_MO; + default: + return -1; + } + +LAST_MO: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto LAST_MOD; + case 'd': + if (last) { + return -1; + } + goto LAST_MOD; + default: + return -1; + } + +LAST_MOD: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto LAST_MODI; + case 'i': + if (last) { + return -1; + } + goto LAST_MODI; + default: + return -1; + } + +LAST_MODI: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto LAST_MODIF; + case 'f': + if (last) { + return -1; + } + goto LAST_MODIF; + default: + return -1; + } + +LAST_MODIF: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto LAST_MODIFI; + case 'i': + if (last) { + return -1; + } + goto LAST_MODIFI; + default: + return -1; + } + +LAST_MODIFI: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto LAST_MODIFIE; + case 'e': + if (last) { + return -1; + } + goto LAST_MODIFIE; + default: + return -1; + } + +LAST_MODIFIE: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return 44; + } + goto LAST_MODIFIED; + case 'd': + if (last) { + return 44; + } + goto LAST_MODIFIED; + default: + return -1; + } + +LI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto LIN; + case 'n': + if (last) { + return -1; + } + goto LIN; + default: + return -1; + } + +LIN: + NEXT_CHAR(); + switch (ch) { + case 'K': + if (last) { + return 45; + } + goto LINK; + case 'k': + 
if (last) { + return 45; + } + goto LINK; + default: + return -1; + } + +LO: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto LOC; + case 'c': + if (last) { + return -1; + } + goto LOC; + default: + return -1; + } + +LOC: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto LOCA; + case 'a': + if (last) { + return -1; + } + goto LOCA; + default: + return -1; + } + +LOCA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto LOCAT; + case 't': + if (last) { + return -1; + } + goto LOCAT; + default: + return -1; + } + +LOCAT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto LOCATI; + case 'i': + if (last) { + return -1; + } + goto LOCATI; + default: + return -1; + } + +LOCATI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto LOCATIO; + case 'o': + if (last) { + return -1; + } + goto LOCATIO; + default: + return -1; + } + +LOCATIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 46; + } + goto LOCATION; + case 'n': + if (last) { + return 46; + } + goto LOCATION; + default: + return -1; + } + +M: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto MA; + case 'a': + if (last) { + return -1; + } + goto MA; + default: + return -1; + } + +MA: + NEXT_CHAR(); + switch (ch) { + case 'X': + if (last) { + return -1; + } + goto MAX; + case 'x': + if (last) { + return -1; + } + goto MAX; + default: + return -1; + } + +MAX: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto MAX_; + default: + return -1; + } + +MAX_: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto MAX_F; + case 'f': + if (last) { + return -1; + } + goto MAX_F; + default: + return -1; + } + +MAX_F: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto MAX_FO; + case 'o': + if (last) { + return -1; + } + goto MAX_FO; + default: + return -1; + } + +MAX_FO: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto MAX_FOR; + case 'r': + if (last) { + return -1; + } + goto MAX_FOR; + default: + return -1; + } + +MAX_FOR: + NEXT_CHAR(); + switch (ch) { + case 'W': + if (last) { + return -1; + } + goto MAX_FORW; + case 'w': + if (last) { + return -1; + } + goto MAX_FORW; + default: + return -1; + } + +MAX_FORW: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto MAX_FORWA; + case 'a': + if (last) { + return -1; + } + goto MAX_FORWA; + default: + return -1; + } + +MAX_FORWA: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto MAX_FORWAR; + case 'r': + if (last) { + return -1; + } + goto MAX_FORWAR; + default: + return -1; + } + +MAX_FORWAR: + NEXT_CHAR(); + switch (ch) { + case 'D': + if (last) { + return -1; + } + goto MAX_FORWARD; + case 'd': + if (last) { + return -1; + } + goto MAX_FORWARD; + default: + return -1; + } + +MAX_FORWARD: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 47; + } + goto MAX_FORWARDS; + case 's': + if (last) { + return 47; + } + goto MAX_FORWARDS; + default: + return -1; + } + +O: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto OR; + case 'r': + if (last) { + return -1; + } + goto OR; + default: + return -1; + } + +OR: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ORI; + case 'i': + if (last) { + return -1; + } + goto ORI; + default: + return -1; + } + +ORI: + NEXT_CHAR(); 
+ switch (ch) { + case 'G': + if (last) { + return -1; + } + goto ORIG; + case 'g': + if (last) { + return -1; + } + goto ORIG; + default: + return -1; + } + +ORIG: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto ORIGI; + case 'i': + if (last) { + return -1; + } + goto ORIGI; + default: + return -1; + } + +ORIGI: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 48; + } + goto ORIGIN; + case 'n': + if (last) { + return 48; + } + goto ORIGIN; + default: + return -1; + } + +P: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto PR; + case 'r': + if (last) { + return -1; + } + goto PR; + default: + return -1; + } + +PR: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto PRA; + case 'a': + if (last) { + return -1; + } + goto PRA; + case 'O': + if (last) { + return -1; + } + goto PRO; + case 'o': + if (last) { + return -1; + } + goto PRO; + default: + return -1; + } + +PRA: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto PRAG; + case 'g': + if (last) { + return -1; + } + goto PRAG; + default: + return -1; + } + +PRAG: + NEXT_CHAR(); + switch (ch) { + case 'M': + if (last) { + return -1; + } + goto PRAGM; + case 'm': + if (last) { + return -1; + } + goto PRAGM; + default: + return -1; + } + +PRAGM: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return 49; + } + goto PRAGMA; + case 'a': + if (last) { + return 49; + } + goto PRAGMA; + default: + return -1; + } + +PRO: + NEXT_CHAR(); + switch (ch) { + case 'X': + if (last) { + return -1; + } + goto PROX; + case 'x': + if (last) { + return -1; + } + goto PROX; + default: + return -1; + } + +PROX: + NEXT_CHAR(); + switch (ch) { + case 'Y': + if (last) { + return -1; + } + goto PROXY; + case 'y': + if (last) { + return -1; + } + goto PROXY; + default: + return -1; + } + +PROXY: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto PROXY_; + default: + return -1; + } + +PROXY_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto PROXY_A; + case 'a': + if (last) { + return -1; + } + goto PROXY_A; + default: + return -1; + } + +PROXY_A: + NEXT_CHAR(); + switch (ch) { + case 'U': + if (last) { + return -1; + } + goto PROXY_AU; + case 'u': + if (last) { + return -1; + } + goto PROXY_AU; + default: + return -1; + } + +PROXY_AU: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto PROXY_AUT; + case 't': + if (last) { + return -1; + } + goto PROXY_AUT; + default: + return -1; + } + +PROXY_AUT: + NEXT_CHAR(); + switch (ch) { + case 'H': + if (last) { + return -1; + } + goto PROXY_AUTH; + case 'h': + if (last) { + return -1; + } + goto PROXY_AUTH; + default: + return -1; + } + +PROXY_AUTH: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto PROXY_AUTHE; + case 'e': + if (last) { + return -1; + } + goto PROXY_AUTHE; + case 'O': + if (last) { + return -1; + } + goto PROXY_AUTHO; + case 'o': + if (last) { + return -1; + } + goto PROXY_AUTHO; + default: + return -1; + } + +PROXY_AUTHE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto PROXY_AUTHEN; + case 'n': + if (last) { + return -1; + } + goto PROXY_AUTHEN; + default: + return -1; + } + +PROXY_AUTHEN: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto PROXY_AUTHENT; + case 't': + if (last) { + return -1; + } + goto PROXY_AUTHENT; + default: + return -1; + } + +PROXY_AUTHENT: + NEXT_CHAR(); 
+ switch (ch) { + case 'I': + if (last) { + return -1; + } + goto PROXY_AUTHENTI; + case 'i': + if (last) { + return -1; + } + goto PROXY_AUTHENTI; + default: + return -1; + } + +PROXY_AUTHENTI: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto PROXY_AUTHENTIC; + case 'c': + if (last) { + return -1; + } + goto PROXY_AUTHENTIC; + default: + return -1; + } + +PROXY_AUTHENTIC: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto PROXY_AUTHENTICA; + case 'a': + if (last) { + return -1; + } + goto PROXY_AUTHENTICA; + default: + return -1; + } + +PROXY_AUTHENTICA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto PROXY_AUTHENTICAT; + case 't': + if (last) { + return -1; + } + goto PROXY_AUTHENTICAT; + default: + return -1; + } + +PROXY_AUTHENTICAT: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 50; + } + goto PROXY_AUTHENTICATE; + case 'e': + if (last) { + return 50; + } + goto PROXY_AUTHENTICATE; + default: + return -1; + } + +PROXY_AUTHO: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto PROXY_AUTHOR; + case 'r': + if (last) { + return -1; + } + goto PROXY_AUTHOR; + default: + return -1; + } + +PROXY_AUTHOR: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto PROXY_AUTHORI; + case 'i': + if (last) { + return -1; + } + goto PROXY_AUTHORI; + default: + return -1; + } + +PROXY_AUTHORI: + NEXT_CHAR(); + switch (ch) { + case 'Z': + if (last) { + return -1; + } + goto PROXY_AUTHORIZ; + case 'z': + if (last) { + return -1; + } + goto PROXY_AUTHORIZ; + default: + return -1; + } + +PROXY_AUTHORIZ: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto PROXY_AUTHORIZA; + case 'a': + if (last) { + return -1; + } + goto PROXY_AUTHORIZA; + default: + return -1; + } + +PROXY_AUTHORIZA: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto PROXY_AUTHORIZAT; + case 't': + if (last) { + return -1; + } + goto PROXY_AUTHORIZAT; + default: + return -1; + } + +PROXY_AUTHORIZAT: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto PROXY_AUTHORIZATI; + case 'i': + if (last) { + return -1; + } + goto PROXY_AUTHORIZATI; + default: + return -1; + } + +PROXY_AUTHORIZATI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto PROXY_AUTHORIZATIO; + case 'o': + if (last) { + return -1; + } + goto PROXY_AUTHORIZATIO; + default: + return -1; + } + +PROXY_AUTHORIZATIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return 51; + } + goto PROXY_AUTHORIZATION; + case 'n': + if (last) { + return 51; + } + goto PROXY_AUTHORIZATION; + default: + return -1; + } + +R: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto RA; + case 'a': + if (last) { + return -1; + } + goto RA; + case 'E': + if (last) { + return -1; + } + goto RE; + case 'e': + if (last) { + return -1; + } + goto RE; + default: + return -1; + } + +RA: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto RAN; + case 'n': + if (last) { + return -1; + } + goto RAN; + default: + return -1; + } + +RAN: + NEXT_CHAR(); + switch (ch) { + case 'G': + if (last) { + return -1; + } + goto RANG; + case 'g': + if (last) { + return -1; + } + goto RANG; + default: + return -1; + } + +RANG: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return 52; + } + goto RANGE; + case 'e': + if (last) { + return 52; + } + goto RANGE; + default: + 
return -1; + } + +RE: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto REF; + case 'f': + if (last) { + return -1; + } + goto REF; + case 'T': + if (last) { + return -1; + } + goto RET; + case 't': + if (last) { + return -1; + } + goto RET; + default: + return -1; + } + +REF: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto REFE; + case 'e': + if (last) { + return -1; + } + goto REFE; + default: + return -1; + } + +REFE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto REFER; + case 'r': + if (last) { + return -1; + } + goto REFER; + default: + return -1; + } + +REFER: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto REFERE; + case 'e': + if (last) { + return -1; + } + goto REFERE; + default: + return -1; + } + +REFERE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return 53; + } + goto REFERER; + case 'r': + if (last) { + return 53; + } + goto REFERER; + default: + return -1; + } + +RET: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto RETR; + case 'r': + if (last) { + return -1; + } + goto RETR; + default: + return -1; + } + +RETR: + NEXT_CHAR(); + switch (ch) { + case 'Y': + if (last) { + return -1; + } + goto RETRY; + case 'y': + if (last) { + return -1; + } + goto RETRY; + default: + return -1; + } + +RETRY: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto RETRY_; + default: + return -1; + } + +RETRY_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto RETRY_A; + case 'a': + if (last) { + return -1; + } + goto RETRY_A; + default: + return -1; + } + +RETRY_A: + NEXT_CHAR(); + switch (ch) { + case 'F': + if (last) { + return -1; + } + goto RETRY_AF; + case 'f': + if (last) { + return -1; + } + goto RETRY_AF; + default: + return -1; + } + +RETRY_AF: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto RETRY_AFT; + case 't': + if (last) { + return -1; + } + goto RETRY_AFT; + default: + return -1; + } + +RETRY_AFT: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto RETRY_AFTE; + case 'e': + if (last) { + return -1; + } + goto RETRY_AFTE; + default: + return -1; + } + +RETRY_AFTE: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return 54; + } + goto RETRY_AFTER; + case 'r': + if (last) { + return 54; + } + goto RETRY_AFTER; + default: + return -1; + } + +S: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SE; + case 'e': + if (last) { + return -1; + } + goto SE; + default: + return -1; + } + +SE: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto SEC; + case 'c': + if (last) { + return -1; + } + goto SEC; + case 'R': + if (last) { + return -1; + } + goto SER; + case 'r': + if (last) { + return -1; + } + goto SER; + case 'T': + if (last) { + return -1; + } + goto SET; + case 't': + if (last) { + return -1; + } + goto SET; + default: + return -1; + } + +SEC: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto SEC_; + default: + return -1; + } + +SEC_: + NEXT_CHAR(); + switch (ch) { + case 'W': + if (last) { + return -1; + } + goto SEC_W; + case 'w': + if (last) { + return -1; + } + goto SEC_W; + default: + return -1; + } + +SEC_W: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SEC_WE; + case 'e': + if (last) { + return -1; + } + goto SEC_WE; + default: + return -1; + } + 
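+/* Descriptive note on this generated matcher (added comment, not part of the
+ * generator's output): the states below continue a case-insensitive trie over
+ * the known HTTP header names, expressed as goto labels.  Each state reads one
+ * character with NEXT_CHAR(); when `last` is set at a terminal state the
+ * matched header's numeric index is returned (e.g. 55 for
+ * Sec-WebSocket-Accept), and -1 is returned as soon as the input can no
+ * longer be a known header name.
+ */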
+SEC_WE: + NEXT_CHAR(); + switch (ch) { + case 'B': + if (last) { + return -1; + } + goto SEC_WEB; + case 'b': + if (last) { + return -1; + } + goto SEC_WEB; + default: + return -1; + } + +SEC_WEB: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto SEC_WEBS; + case 's': + if (last) { + return -1; + } + goto SEC_WEBS; + default: + return -1; + } + +SEC_WEBS: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto SEC_WEBSO; + case 'o': + if (last) { + return -1; + } + goto SEC_WEBSO; + default: + return -1; + } + +SEC_WEBSO: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto SEC_WEBSOC; + case 'c': + if (last) { + return -1; + } + goto SEC_WEBSOC; + default: + return -1; + } + +SEC_WEBSOC: + NEXT_CHAR(); + switch (ch) { + case 'K': + if (last) { + return -1; + } + goto SEC_WEBSOCK; + case 'k': + if (last) { + return -1; + } + goto SEC_WEBSOCK; + default: + return -1; + } + +SEC_WEBSOCK: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SEC_WEBSOCKE; + case 'e': + if (last) { + return -1; + } + goto SEC_WEBSOCKE; + default: + return -1; + } + +SEC_WEBSOCKE: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto SEC_WEBSOCKET; + case 't': + if (last) { + return -1; + } + goto SEC_WEBSOCKET; + default: + return -1; + } + +SEC_WEBSOCKET: + NEXT_CHAR(); + switch (ch) { + case '-': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_; + default: + return -1; + } + +SEC_WEBSOCKET_: + NEXT_CHAR(); + switch (ch) { + case 'A': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_A; + case 'a': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_A; + case 'E': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_E; + case 'e': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_E; + case 'K': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_K; + case 'k': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_K; + case 'P': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_P; + case 'p': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_P; + case 'V': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_V; + case 'v': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_V; + default: + return -1; + } + +SEC_WEBSOCKET_A: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_AC; + case 'c': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_AC; + default: + return -1; + } + +SEC_WEBSOCKET_AC: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACC; + case 'c': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACC; + default: + return -1; + } + +SEC_WEBSOCKET_ACC: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACCE; + case 'e': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACCE; + default: + return -1; + } + +SEC_WEBSOCKET_ACCE: + NEXT_CHAR(); + switch (ch) { + case 'P': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACCEP; + case 'p': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_ACCEP; + default: + return -1; + } + +SEC_WEBSOCKET_ACCEP: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return 55; + } + goto SEC_WEBSOCKET_ACCEPT; + case 't': + if (last) { + return 55; + } + goto SEC_WEBSOCKET_ACCEPT; + default: + return -1; + } + +SEC_WEBSOCKET_E: + NEXT_CHAR(); + switch (ch) { + case 'X': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EX; + case 'x': + if (last) { + return -1; + } + goto 
SEC_WEBSOCKET_EX; + default: + return -1; + } + +SEC_WEBSOCKET_EX: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXT; + case 't': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXT; + default: + return -1; + } + +SEC_WEBSOCKET_EXT: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTE; + case 'e': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTE; + default: + return -1; + } + +SEC_WEBSOCKET_EXTE: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTEN; + case 'n': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTEN; + default: + return -1; + } + +SEC_WEBSOCKET_EXTEN: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENS; + case 's': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENS; + default: + return -1; + } + +SEC_WEBSOCKET_EXTENS: + NEXT_CHAR(); + switch (ch) { + case 'I': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSI; + case 'i': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSI; + default: + return -1; + } + +SEC_WEBSOCKET_EXTENSI: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSIO; + case 'o': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSIO; + default: + return -1; + } + +SEC_WEBSOCKET_EXTENSIO: + NEXT_CHAR(); + switch (ch) { + case 'N': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSION; + case 'n': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_EXTENSION; + default: + return -1; + } + +SEC_WEBSOCKET_EXTENSION: + NEXT_CHAR(); + switch (ch) { + case 'S': + if (last) { + return 56; + } + goto SEC_WEBSOCKET_EXTENSIONS; + case 's': + if (last) { + return 56; + } + goto SEC_WEBSOCKET_EXTENSIONS; + default: + return -1; + } + +SEC_WEBSOCKET_K: + NEXT_CHAR(); + switch (ch) { + case 'E': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_KE; + case 'e': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_KE; + default: + return -1; + } + +SEC_WEBSOCKET_KE: + NEXT_CHAR(); + switch (ch) { + case 'Y': + if (last) { + return 57; + } + goto SEC_WEBSOCKET_KEY; + case 'y': + if (last) { + return 57; + } + goto SEC_WEBSOCKET_KEY; + default: + return -1; + } + +SEC_WEBSOCKET_KEY: + NEXT_CHAR(); + switch (ch) { + case '1': + if (last) { + return 58; + } + goto SEC_WEBSOCKET_KEY1; + default: + return -1; + } + +SEC_WEBSOCKET_P: + NEXT_CHAR(); + switch (ch) { + case 'R': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PR; + case 'r': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PR; + default: + return -1; + } + +SEC_WEBSOCKET_PR: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PRO; + case 'o': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PRO; + default: + return -1; + } + +SEC_WEBSOCKET_PRO: + NEXT_CHAR(); + switch (ch) { + case 'T': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROT; + case 't': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROT; + default: + return -1; + } + +SEC_WEBSOCKET_PROT: + NEXT_CHAR(); + switch (ch) { + case 'O': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROTO; + case 'o': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROTO; + default: + return -1; + } + +SEC_WEBSOCKET_PROTO: + NEXT_CHAR(); + switch (ch) { + case 'C': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROTOC; + case 'c': + if (last) { + return -1; + } + goto SEC_WEBSOCKET_PROTOC; + default: + return 
[... aiohttp/_find_header.c continues with the machine-generated, case-insensitive lookup states for the remaining known headers (Sec-WebSocket-Protocol, Sec-WebSocket-Version, Server, Set-Cookie, TE, Trailer, Transfer-Encoding, Upgrade, URI, User-Agent, Vary, Via, Want-Digest, Warning, WebSocket, WWW-Authenticate, X-Forwarded-For, X-Forwarded-Host and X-Forwarded-Proto, returning indices 59 to 77), followed by the shared terminal labels and the final missing: branch that returns -1 ...]
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_find_header.h b/venv/lib/python3.7/site-packages/aiohttp/_find_header.h
new file mode 100644
index 0000000..99b7b4f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_find_header.h
@@ -0,0 +1,14 @@
+#ifndef _FIND_HEADERS_H
+#define _FIND_HEADERS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int find_header(const char *str, int size);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_find_header.pxd b/venv/lib/python3.7/site-packages/aiohttp/_find_header.pxd
new file mode 100644
index 0000000..37a6c37
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_find_header.pxd
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+    int find_header(char *, int)
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.c b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.c
new file mode 100644
index 0000000..e13d93c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.c
@@ -0,0 +1,7325 @@
+/* Generated by Cython 0.29.2 */
+
+/* BEGIN: Cython Metadata
+{
+    "distutils": {
+        "name": "aiohttp._frozenlist",
+        "sources": [
+            "aiohttp/_frozenlist.pyx"
+        ]
+    },
+    "module_name": "aiohttp._frozenlist"
+}
+END: Cython Metadata */
[... remainder of the Cython-generated preamble for aiohttp._frozenlist elided: the Python.h include and version guard, the CYTHON_* feature and compatibility macros for CPython/PyPy/Pyston, the PEP 393, thread-state/TSS and fast-call shims, string and number conversion helpers, the FrozenList extension-type struct and vtable declarations, and the runtime support prototypes (refnanny, argument parsing, exception fetch/restore, item access, list append, module-global lookup, import and reduce helpers) ...]
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ + +/* Module declarations from 'aiohttp._frozenlist' */ +static PyTypeObject *__pyx_ptype_7aiohttp_11_frozenlist_FrozenList = 0; +static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *, PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "aiohttp._frozenlist" +extern int __pyx_module_is_main_aiohttp___frozenlist; +int __pyx_module_is_main_aiohttp___frozenlist = 0; + +/* Implementation of 'aiohttp._frozenlist' */ +static PyObject *__pyx_builtin_RuntimeError; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_pos[] = "pos"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_item[] = "item"; +static const char __pyx_k_iter[] = "__iter__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_clear[] = "clear"; +static const char __pyx_k_count[] = "count"; +static const char __pyx_k_index[] = "index"; +static 
const char __pyx_k_items[] = "items"; +static const char __pyx_k_format[] = "format"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_remove[] = "remove"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_register[] = "register"; +static const char __pyx_k_reversed[] = "__reversed__"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_FrozenList[] = "FrozenList"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = "stringsource"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_MutableSequence[] = "MutableSequence"; +static const char __pyx_k_collections_abc[] = "collections.abc"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_FrozenList_frozen_r[] = ""; +static const char __pyx_k_aiohttp__frozenlist[] = "aiohttp._frozenlist"; +static const char __pyx_k_pyx_unpickle_FrozenList[] = "__pyx_unpickle_FrozenList"; +static const char __pyx_k_Cannot_modify_frozen_list[] = "Cannot modify frozen list."; +static const char __pyx_k_Incompatible_checksums_s_vs_0x94[] = "Incompatible checksums (%s vs 0x949a143 = (_items, frozen))"; +static PyObject *__pyx_kp_s_Cannot_modify_frozen_list; +static PyObject *__pyx_n_s_FrozenList; +static PyObject *__pyx_kp_s_FrozenList_frozen_r; +static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x94; +static PyObject *__pyx_n_s_MutableSequence; +static PyObject *__pyx_n_s_PickleError; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_aiohttp__frozenlist; +static PyObject *__pyx_n_s_clear; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_collections_abc; +static PyObject *__pyx_n_s_count; +static PyObject *__pyx_n_s_dict; +static PyObject *__pyx_n_s_format; +static PyObject *__pyx_n_s_getstate; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_index; +static PyObject *__pyx_n_s_item; +static PyObject *__pyx_n_s_items; +static PyObject *__pyx_n_s_iter; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_new; +static PyObject *__pyx_n_s_pickle; +static PyObject *__pyx_n_s_pop; +static PyObject *__pyx_n_s_pos; +static PyObject *__pyx_n_s_pyx_PickleError; +static PyObject *__pyx_n_s_pyx_checksum; +static PyObject *__pyx_n_s_pyx_result; +static PyObject *__pyx_n_s_pyx_state; +static PyObject *__pyx_n_s_pyx_type; +static PyObject *__pyx_n_s_pyx_unpickle_FrozenList; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_reduce; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_reduce_ex; +static PyObject *__pyx_n_s_register; +static PyObject *__pyx_n_s_remove; +static PyObject *__pyx_n_s_reversed; +static PyObject 
*__pyx_n_s_setstate; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_kp_s_stringsource; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_update; +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ +static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item); /* proto */ +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_int_0; +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_2; +static PyObject *__pyx_int_3; +static PyObject *__pyx_int_4; +static PyObject *__pyx_int_5; +static PyObject *__pyx_int_155820355; +static PyObject *__pyx_int_neg_1; +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_codeobj__3; +/* Late includes */ + +/* "aiohttp/_frozenlist.pyx":9 + * cdef list _items + * + * def __init__(self, items=None): # <<<<<<<<<<<<<< + * self.frozen = False + * if items is not None: + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_items = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_items,0}; + PyObject* values[1] = {0}; + values[0] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_items); + if (value) { values[0] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_items = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = 
__pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_items); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__init__", 0); + __Pyx_INCREF(__pyx_v_items); + + /* "aiohttp/_frozenlist.pyx":10 + * + * def __init__(self, items=None): + * self.frozen = False # <<<<<<<<<<<<<< + * if items is not None: + * items = list(items) + */ + __pyx_v_self->frozen = 0; + + /* "aiohttp/_frozenlist.pyx":11 + * def __init__(self, items=None): + * self.frozen = False + * if items is not None: # <<<<<<<<<<<<<< + * items = list(items) + * else: + */ + __pyx_t_1 = (__pyx_v_items != Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":12 + * self.frozen = False + * if items is not None: + * items = list(items) # <<<<<<<<<<<<<< + * else: + * items = [] + */ + __pyx_t_3 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_frozenlist.pyx":11 + * def __init__(self, items=None): + * self.frozen = False + * if items is not None: # <<<<<<<<<<<<<< + * items = list(items) + * else: + */ + goto __pyx_L3; + } + + /* "aiohttp/_frozenlist.pyx":14 + * items = list(items) + * else: + * items = [] # <<<<<<<<<<<<<< + * self._items = items + * + */ + /*else*/ { + __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); + __pyx_t_3 = 0; + } + __pyx_L3:; + + /* "aiohttp/_frozenlist.pyx":15 + * else: + * items = [] + * self._items = items # <<<<<<<<<<<<<< + * + * cdef object _check_frozen(self): + */ + if (!(likely(PyList_CheckExact(__pyx_v_items))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_items)->tp_name), 0))) __PYX_ERR(0, 15, __pyx_L1_error) + __pyx_t_3 = __pyx_v_items; + __Pyx_INCREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->_items); + __Pyx_DECREF(__pyx_v_self->_items); + __pyx_v_self->_items = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_frozenlist.pyx":9 + * cdef list _items + * + * def __init__(self, items=None): # <<<<<<<<<<<<<< + * self.frozen = False + * if items is not None: + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_items); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":17 + * self._items = items + * + * cdef object _check_frozen(self): # <<<<<<<<<<<<<< + * if self.frozen: + * raise RuntimeError("Cannot modify frozen list.") + */ + +static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("_check_frozen", 0); + + /* "aiohttp/_frozenlist.pyx":18 + * + * cdef object 
_check_frozen(self): + * if self.frozen: # <<<<<<<<<<<<<< + * raise RuntimeError("Cannot modify frozen list.") + * + */ + __pyx_t_1 = (__pyx_v_self->frozen != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_frozenlist.pyx":19 + * cdef object _check_frozen(self): + * if self.frozen: + * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< + * + * cdef inline object _fast_len(self): + */ + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 19, __pyx_L1_error) + + /* "aiohttp/_frozenlist.pyx":18 + * + * cdef object _check_frozen(self): + * if self.frozen: # <<<<<<<<<<<<<< + * raise RuntimeError("Cannot modify frozen list.") + * + */ + } + + /* "aiohttp/_frozenlist.pyx":17 + * self._items = items + * + * cdef object _check_frozen(self): # <<<<<<<<<<<<<< + * if self.frozen: + * raise RuntimeError("Cannot modify frozen list.") + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._check_frozen", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":21 + * raise RuntimeError("Cannot modify frozen list.") + * + * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< + * return len(self._items) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + __Pyx_RefNannySetupContext("_fast_len", 0); + + /* "aiohttp/_frozenlist.pyx":22 + * + * cdef inline object _fast_len(self): + * return len(self._items) # <<<<<<<<<<<<<< + * + * def freeze(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_v_self->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 22, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyInt_FromSsize_t(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":21 + * raise RuntimeError("Cannot modify frozen list.") + * + * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< + * return len(self._items) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._fast_len", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":24 + * return len(self._items) + * + * def freeze(self): # <<<<<<<<<<<<<< + * self.frozen = True + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, 
CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("freeze (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("freeze", 0); + + /* "aiohttp/_frozenlist.pyx":25 + * + * def freeze(self): + * self.frozen = True # <<<<<<<<<<<<<< + * + * def __getitem__(self, index): + */ + __pyx_v_self->frozen = 1; + + /* "aiohttp/_frozenlist.pyx":24 + * return len(self._items) + * + * def freeze(self): # <<<<<<<<<<<<<< + * self.frozen = True + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":27 + * self.frozen = True + * + * def __getitem__(self, index): # <<<<<<<<<<<<<< + * return self._items[index] + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__getitem__", 0); + + /* "aiohttp/_frozenlist.pyx":28 + * + * def __getitem__(self, index): + * return self._items[index] # <<<<<<<<<<<<<< + * + * def __setitem__(self, index, value): + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 28, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_items, __pyx_v_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":27 + * self.frozen = True + * + * def __getitem__(self, index): # <<<<<<<<<<<<<< + * return self._items[index] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":30 + * return self._items[index] + * + * def __setitem__(self, index, value): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items[index] = value + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject 
*__pyx_v_value); /*proto*/ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setitem__", 0); + + /* "aiohttp/_frozenlist.pyx":31 + * + * def __setitem__(self, index, value): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items[index] = value + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":32 + * def __setitem__(self, index, value): + * self._check_frozen() + * self._items[index] = value # <<<<<<<<<<<<<< + * + * def __delitem__(self, index): + */ + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 32, __pyx_L1_error) + } + if (unlikely(PyObject_SetItem(__pyx_v_self->_items, __pyx_v_index, __pyx_v_value) < 0)) __PYX_ERR(0, 32, __pyx_L1_error) + + /* "aiohttp/_frozenlist.pyx":30 + * return self._items[index] + * + * def __setitem__(self, index, value): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items[index] = value + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":34 + * self._items[index] = value + * + * def __delitem__(self, index): # <<<<<<<<<<<<<< + * self._check_frozen() + * del self._items[index] + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__delitem__", 0); + + /* "aiohttp/_frozenlist.pyx":35 + * + * def __delitem__(self, index): + * self._check_frozen() # <<<<<<<<<<<<<< + * del self._items[index] + * + */ + __pyx_t_1 = ((struct 
__pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":36 + * def __delitem__(self, index): + * self._check_frozen() + * del self._items[index] # <<<<<<<<<<<<<< + * + * def __len__(self): + */ + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 36, __pyx_L1_error) + } + if (unlikely(PyObject_DelItem(__pyx_v_self->_items, __pyx_v_index) < 0)) __PYX_ERR(0, 36, __pyx_L1_error) + + /* "aiohttp/_frozenlist.pyx":34 + * self._items[index] = value + * + * def __delitem__(self, index): # <<<<<<<<<<<<<< + * self._check_frozen() + * del self._items[index] + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":38 + * del self._items[index] + * + * def __len__(self): # <<<<<<<<<<<<<< + * return self._fast_len() + * + */ + +/* Python wrapper */ +static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self); /*proto*/ +static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + __Pyx_RefNannySetupContext("__len__", 0); + + /* "aiohttp/_frozenlist.pyx":39 + * + * def __len__(self): + * return self._fast_len() # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 39, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":38 + * del self._items[index] + * + * def __len__(self): # <<<<<<<<<<<<<< + * return self._fast_len() + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":41 + * return self._fast_len() + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self._items.__iter__() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "aiohttp/_frozenlist.pyx":42 + * + * def __iter__(self): + * return self._items.__iter__() # <<<<<<<<<<<<<< + * + * def __reversed__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_iter); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":41 + * return self._fast_len() + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self._items.__iter__() + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":44 + * return self._items.__iter__() + * + * def __reversed__(self): # <<<<<<<<<<<<<< + * return self._items.__reversed__() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reversed__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__reversed__", 0); + + /* "aiohttp/_frozenlist.pyx":45 + * + * def __reversed__(self): + * return self._items.__reversed__() # <<<<<<<<<<<<<< + * + * def __richcmp__(self, other, op): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_reversed); 
if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":44 + * return self._items.__iter__() + * + * def __reversed__(self): # <<<<<<<<<<<<<< + * return self._items.__reversed__() + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reversed__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":47 + * return self._items.__reversed__() + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 0: # < + * return list(self) < other + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) { + PyObject *__pyx_v_op = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0); + __pyx_v_op = __Pyx_PyInt_From_int(__pyx_arg_op); if (unlikely(!__pyx_v_op)) __PYX_ERR(0, 47, __pyx_L3_error) + __Pyx_GOTREF(__pyx_v_op); + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_other), ((PyObject *)__pyx_v_op)); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_op); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__richcmp__", 0); + + /* "aiohttp/_frozenlist.pyx":48 + * + * def __richcmp__(self, other, op): + * if op == 0: # < # <<<<<<<<<<<<<< + * return list(self) < other + * if op == 1: # <= + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":49 + * def 
__richcmp__(self, other, op): + * if op == 0: # < + * return list(self) < other # <<<<<<<<<<<<<< + * if op == 1: # <= + * return list(self) <= other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":48 + * + * def __richcmp__(self, other, op): + * if op == 0: # < # <<<<<<<<<<<<<< + * return list(self) < other + * if op == 1: # <= + */ + } + + /* "aiohttp/_frozenlist.pyx":50 + * if op == 0: # < + * return list(self) < other + * if op == 1: # <= # <<<<<<<<<<<<<< + * return list(self) <= other + * if op == 2: # == + */ + __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":51 + * return list(self) < other + * if op == 1: # <= + * return list(self) <= other # <<<<<<<<<<<<<< + * if op == 2: # == + * return list(self) == other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_LE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":50 + * if op == 0: # < + * return list(self) < other + * if op == 1: # <= # <<<<<<<<<<<<<< + * return list(self) <= other + * if op == 2: # == + */ + } + + /* "aiohttp/_frozenlist.pyx":52 + * if op == 1: # <= + * return list(self) <= other + * if op == 2: # == # <<<<<<<<<<<<<< + * return list(self) == other + * if op == 3: # != + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 52, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":53 + * return list(self) <= other + * if op == 2: # == + * return list(self) == other # <<<<<<<<<<<<<< + * if op == 3: # != + * return list(self) != other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":52 + * if op == 1: # <= + * return list(self) <= other + * if op == 2: # == # <<<<<<<<<<<<<< + * return list(self) == other + * if op == 3: # != + */ + } + + /* "aiohttp/_frozenlist.pyx":54 + * if op == 2: # == + * return list(self) == other + * if op == 3: # != # <<<<<<<<<<<<<< + * return list(self) != other + * if op == 4: # > + */ + 
__pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 54, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 54, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":55 + * return list(self) == other + * if op == 3: # != + * return list(self) != other # <<<<<<<<<<<<<< + * if op == 4: # > + * return list(self) > other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":54 + * if op == 2: # == + * return list(self) == other + * if op == 3: # != # <<<<<<<<<<<<<< + * return list(self) != other + * if op == 4: # > + */ + } + + /* "aiohttp/_frozenlist.pyx":56 + * if op == 3: # != + * return list(self) != other + * if op == 4: # > # <<<<<<<<<<<<<< + * return list(self) > other + * if op == 5: # => + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_4, 4, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":57 + * return list(self) != other + * if op == 4: # > + * return list(self) > other # <<<<<<<<<<<<<< + * if op == 5: # => + * return list(self) >= other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 57, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":56 + * if op == 3: # != + * return list(self) != other + * if op == 4: # > # <<<<<<<<<<<<<< + * return list(self) > other + * if op == 5: # => + */ + } + + /* "aiohttp/_frozenlist.pyx":58 + * if op == 4: # > + * return list(self) > other + * if op == 5: # => # <<<<<<<<<<<<<< + * return list(self) >= other + * + */ + __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_5, 5, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 58, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_2) { + + /* "aiohttp/_frozenlist.pyx":59 + * return list(self) > other + * if op == 5: # => + * return list(self) >= other # <<<<<<<<<<<<<< + * + * def insert(self, pos, item): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":58 + * if 
op == 4: # > + * return list(self) > other + * if op == 5: # => # <<<<<<<<<<<<<< + * return list(self) >= other + * + */ + } + + /* "aiohttp/_frozenlist.pyx":47 + * return self._items.__reversed__() + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 0: # < + * return list(self) < other + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":61 + * return list(self) >= other + * + * def insert(self, pos, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.insert(pos, item) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_pos = 0; + PyObject *__pyx_v_item = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("insert (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pos,&__pyx_n_s_item,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pos)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, 1); __PYX_ERR(0, 61, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "insert") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_pos = values[0]; + __pyx_v_item = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 61, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_pos, __pyx_v_item); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item) { + PyObject *__pyx_r = NULL; + 
__Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + __Pyx_RefNannySetupContext("insert", 0); + + /* "aiohttp/_frozenlist.pyx":62 + * + * def insert(self, pos, item): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items.insert(pos, item) + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":63 + * def insert(self, pos, item): + * self._check_frozen() + * self._items.insert(pos, item) # <<<<<<<<<<<<<< + * + * def __contains__(self, item): + */ + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "insert"); + __PYX_ERR(0, 63, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_pos); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L1_error) + __pyx_t_3 = PyList_Insert(__pyx_v_self->_items, __pyx_t_2, __pyx_v_item); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 63, __pyx_L1_error) + + /* "aiohttp/_frozenlist.pyx":61 + * return list(self) >= other + * + * def insert(self, pos, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.insert(pos, item) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":65 + * self._items.insert(pos, item) + * + * def __contains__(self, item): # <<<<<<<<<<<<<< + * return item in self._items + * + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ +static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__contains__", 0); + + /* "aiohttp/_frozenlist.pyx":66 + * + * def __contains__(self, item): + * return item in self._items # <<<<<<<<<<<<<< + * + * def __iadd__(self, items): + */ + __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_item, __pyx_v_self->_items, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) + __pyx_r = __pyx_t_1; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":65 + * self._items.insert(pos, item) + * + * def __contains__(self, item): # <<<<<<<<<<<<<< + * return item in self._items + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":68 + * return item in self._items + * + * def __iadd__(self, items): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items += list(items) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iadd__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__iadd__", 0); + + /* "aiohttp/_frozenlist.pyx":69 + * + * def __iadd__(self, items): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items += list(items) + * return self + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":70 + * def __iadd__(self, items): + * self._check_frozen() + * self._items += list(items) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_items); + __Pyx_DECREF(__pyx_v_self->_items); + __pyx_v_self->_items = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_frozenlist.pyx":71 + * self._check_frozen() + * self._items += list(items) + * return self # <<<<<<<<<<<<<< + * + * def index(self, item): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":68 + * return item in self._items + * + * def __iadd__(self, items): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items += list(items) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iadd__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":73 + * return self + * + * def index(self, item): # <<<<<<<<<<<<<< + * return self._items.index(item) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("index (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("index", 0); + + /* "aiohttp/_frozenlist.pyx":74 + * + * def index(self, item): + * return self._items.index(item) # <<<<<<<<<<<<<< + * + * def remove(self, item): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":73 + * return self + * + * def index(self, item): # <<<<<<<<<<<<<< + * return self._items.index(item) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.index", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":76 + * return self._items.index(item) + * + * def remove(self, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.remove(item) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("remove (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("remove", 0); + + /* "aiohttp/_frozenlist.pyx":77 + * + * def remove(self, item): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items.remove(item) + * + */ + __pyx_t_1 = ((struct 
__pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":78 + * def remove(self, item): + * self._check_frozen() + * self._items.remove(item) # <<<<<<<<<<<<<< + * + * def clear(self): + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_remove); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 78, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":76 + * return self._items.index(item) + * + * def remove(self, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.remove(item) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":80 + * self._items.remove(item) + * + * def clear(self): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.clear() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("clear", 0); + + /* "aiohttp/_frozenlist.pyx":81 + * + * def clear(self): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items.clear() + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":82 + * def clear(self): + * self._check_frozen() + * self._items.clear() # <<<<<<<<<<<<<< + * + * def extend(self, items): + */ + __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_clear); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":80 + * self._items.remove(item) + * + * def clear(self): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.clear() + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":84 + * self._items.clear() + * + * def extend(self, items): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items += list(items) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("extend (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("extend", 0); + + /* "aiohttp/_frozenlist.pyx":85 + * + * def extend(self, items): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items += list(items) + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":86 + * def extend(self, items): + * self._check_frozen() + * self._items += list(items) # <<<<<<<<<<<<<< + * + * def reverse(self): + */ + __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_items); + __Pyx_DECREF(__pyx_v_self->_items); + __pyx_v_self->_items = 
((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_frozenlist.pyx":84 + * self._items.clear() + * + * def extend(self, items): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items += list(items) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":88 + * self._items += list(items) + * + * def reverse(self): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.reverse() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("reverse (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + __Pyx_RefNannySetupContext("reverse", 0); + + /* "aiohttp/_frozenlist.pyx":89 + * + * def reverse(self): + * self._check_frozen() # <<<<<<<<<<<<<< + * self._items.reverse() + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":90 + * def reverse(self): + * self._check_frozen() + * self._items.reverse() # <<<<<<<<<<<<<< + * + * def pop(self, index=-1): + */ + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "reverse"); + __PYX_ERR(0, 90, __pyx_L1_error) + } + __pyx_t_2 = PyList_Reverse(__pyx_v_self->_items); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 90, __pyx_L1_error) + + /* "aiohttp/_frozenlist.pyx":88 + * self._items += list(items) + * + * def reverse(self): # <<<<<<<<<<<<<< + * self._check_frozen() + * self._items.reverse() + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.reverse", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":92 + * self._items.reverse() + * + * def pop(self, index=-1): # <<<<<<<<<<<<<< + * self._check_frozen() + * return self._items.pop(index) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject 
*__pyx_kwds) { + PyObject *__pyx_v_index = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pop (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_index,0}; + PyObject* values[1] = {0}; + values[0] = ((PyObject *)__pyx_int_neg_1); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_index); + if (value) { values[0] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "pop") < 0)) __PYX_ERR(0, 92, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_index = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("pop", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 92, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_index); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + __Pyx_RefNannySetupContext("pop", 0); + + /* "aiohttp/_frozenlist.pyx":93 + * + * def pop(self, index=-1): + * self._check_frozen() # <<<<<<<<<<<<<< + * return self._items.pop(index) + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":94 + * def pop(self, index=-1): + * self._check_frozen() + * return self._items.pop(index) # <<<<<<<<<<<<<< + * + * def append(self, item): + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "pop"); + __PYX_ERR(0, 94, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_index); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 94, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyList_PopIndex(__pyx_v_self->_items, __pyx_v_index, __pyx_t_2, 1, Py_ssize_t, PyInt_FromSsize_t); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":92 + * self._items.reverse() + * + * def pop(self, index=-1): # <<<<<<<<<<<<<< + * self._check_frozen() + * return self._items.pop(index) + */ + + /* function exit code */ + 
__pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":96 + * return self._items.pop(index) + * + * def append(self, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * return self._items.append(item) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("append (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + __Pyx_RefNannySetupContext("append", 0); + + /* "aiohttp/_frozenlist.pyx":97 + * + * def append(self, item): + * self._check_frozen() # <<<<<<<<<<<<<< + * return self._items.append(item) + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_frozenlist.pyx":98 + * def append(self, item): + * self._check_frozen() + * return self._items.append(item) # <<<<<<<<<<<<<< + * + * def count(self, item): + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 98, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_self->_items, __pyx_v_item); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 98, __pyx_L1_error) + __pyx_t_1 = __Pyx_Owned_Py_None(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 98, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":96 + * return self._items.pop(index) + * + * def append(self, item): # <<<<<<<<<<<<<< + * self._check_frozen() + * return self._items.append(item) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.append", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":100 + * return self._items.append(item) + * + * def count(self, item): # <<<<<<<<<<<<<< + * return self._items.count(item) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("count (wrapper)", 0); + 
__pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("count", 0); + + /* "aiohttp/_frozenlist.pyx":101 + * + * def count(self, item): + * return self._items.count(item) # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_count); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":100 + * return self._items.append(item) + * + * def count(self, item): # <<<<<<<<<<<<<< + * return self._items.count(item) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.count", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":103 + * return self._items.count(item) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * return '<FrozenList(frozen={}, {!r})>'.format(self.frozen, + * self._items) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "aiohttp/_frozenlist.pyx":104 + * + * def __repr__(self): + * return '<FrozenList(frozen={}, {!r})>'.format(self.frozen, # <<<<<<<<<<<<<< + * self._items) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_FrozenList_frozen_r, 
__pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + + /* "aiohttp/_frozenlist.pyx":105 + * def __repr__(self): + * return '<FrozenList(frozen={}, {!r})>'.format(self.frozen, + * self._items) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); + __Pyx_INCREF(__pyx_v_self->_items); + __Pyx_GIVEREF(__pyx_v_self->_items); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_self->_items); + __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_frozenlist.pyx":103 + * return self._items.count(item) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * return '<FrozenList(frozen={}, {!r})>'.format(self.frozen, + * self._items) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_frozenlist.pyx":6 + * cdef class FrozenList: + * + * cdef readonly bint frozen # <<<<<<<<<<<<<< + * cdef list _items + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(((struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.frozen.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._items, self.frozen) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_self->_items); + __Pyx_GIVEREF(__pyx_v_self->_items); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_items); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._items, self.frozen) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v__dict = __pyx_t_2; + __pyx_t_2 = 0; + + /* "(tree fragment)":7 + * state = (self._items, self.frozen) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state 
+= (_dict,) + * use_setstate = True + */ + __pyx_t_3 = (__pyx_v__dict != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._items is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._items, self.frozen) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._items is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state + */ + /*else*/ { + __pyx_t_4 = (__pyx_v_self->_items != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_4; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._items is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state + * else: + */ + __pyx_t_4 = (__pyx_v_use_setstate != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":13 + * use_setstate = self._items is not None + * if use_setstate: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_155820355); + __Pyx_GIVEREF(__pyx_int_155820355); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._items is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state + * else: + */ + } + + /* 
"(tree fragment)":15 + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state + * else: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_155820355); + __Pyx_GIVEREF(__pyx_int_155820355); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); + __pyx_t_5 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if 
(!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList = {"__pyx_unpickle_FrozenList", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_FrozenList") < 0)) 
__PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x949a143) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum != 0x949a143: + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + * __pyx_result = FrozenList.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v___pyx_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum != 0x949a143: + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = FrozenList.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = 
__Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + * __pyx_result = FrozenList.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v___pyx_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + * __pyx_result = FrozenList.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_6 = (__pyx_t_1 != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":9 + * __pyx_result = FrozenList.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_3 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) + * __pyx_result = FrozenList.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject 
*__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_items); + __Pyx_DECREF(__pyx_v___pyx_result->_items); + __pyx_v___pyx_result->_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->frozen = __pyx_t_2; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 2) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), 
__pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList __pyx_vtable_7aiohttp_11_frozenlist_FrozenList; + +static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o); + p->__pyx_vtab = __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList; + p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { + struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), 
Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_items); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; + if (p->_items) { + e = (*v)(p->_items, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; + tmp = ((PyObject*)p->_items); + p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} +static PyObject *__pyx_sq_item_7aiohttp_11_frozenlist_FrozenList(PyObject *o, Py_ssize_t i) { + PyObject *r; + PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; + r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); + Py_DECREF(x); + return r; +} + +static int __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList(PyObject *o, PyObject *i, PyObject *v) { + if (v) { + return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(o, i, v); + } + else { + return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(o, i); + } +} + +static PyObject *__pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(o); +} + +static PyMethodDef __pyx_methods_7aiohttp_11_frozenlist_FrozenList[] = { + {"freeze", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze, METH_NOARGS, 0}, + {"__reversed__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__, METH_NOARGS, 0}, + {"insert", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert, METH_VARARGS|METH_KEYWORDS, 0}, + {"index", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index, METH_O, 0}, + {"remove", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove, METH_O, 0}, + {"clear", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear, METH_NOARGS, 0}, + {"extend", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend, METH_O, 0}, + {"reverse", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse, METH_NOARGS, 0}, + {"pop", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop, METH_VARARGS|METH_KEYWORDS, 0}, + {"append", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append, METH_O, 0}, + {"count", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_7aiohttp_11_frozenlist_FrozenList[] = { + {(char *)"frozen", __pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyNumberMethods __pyx_tp_as_number_FrozenList = { + 0, /*nb_add*/ + 0, /*nb_subtract*/ + 0, /*nb_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_divide*/ + #endif + 0, /*nb_remainder*/ + 0, 
/*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_nonzero*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + 0, /*nb_and*/ + 0, /*nb_xor*/ + 0, /*nb_or*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_coerce*/ + #endif + 0, /*nb_int*/ + #if PY_MAJOR_VERSION < 3 + 0, /*nb_long*/ + #else + 0, /*reserved*/ + #endif + 0, /*nb_float*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_oct*/ + #endif + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_hex*/ + #endif + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__, /*nb_inplace_add*/ + 0, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_inplace_divide*/ + #endif + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + 0, /*nb_inplace_and*/ + 0, /*nb_inplace_xor*/ + 0, /*nb_inplace_or*/ + 0, /*nb_floor_divide*/ + 0, /*nb_true_divide*/ + 0, /*nb_inplace_floor_divide*/ + 0, /*nb_inplace_true_divide*/ + 0, /*nb_index*/ + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_matrix_multiply*/ + #endif + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_inplace_matrix_multiply*/ + #endif +}; + +static PySequenceMethods __pyx_tp_as_sequence_FrozenList = { + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + __pyx_sq_item_7aiohttp_11_frozenlist_FrozenList, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping_FrozenList = { + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*mp_length*/ + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__, /*mp_subscript*/ + __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._frozenlist.FrozenList", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__, /*tp_repr*/ + &__pyx_tp_as_number_FrozenList, /*tp_as_number*/ + &__pyx_tp_as_sequence_FrozenList, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_FrozenList, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList, /*tp_clear*/ + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_11_frozenlist_FrozenList, /*tp_methods*/ + 0, 
/*tp_members*/ + __pyx_getsets_7aiohttp_11_frozenlist_FrozenList, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_11_frozenlist_FrozenList, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__frozenlist(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__frozenlist}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "_frozenlist", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_Cannot_modify_frozen_list, __pyx_k_Cannot_modify_frozen_list, sizeof(__pyx_k_Cannot_modify_frozen_list), 0, 0, 1, 0}, + {&__pyx_n_s_FrozenList, __pyx_k_FrozenList, sizeof(__pyx_k_FrozenList), 0, 0, 1, 1}, + {&__pyx_kp_s_FrozenList_frozen_r, __pyx_k_FrozenList_frozen_r, sizeof(__pyx_k_FrozenList_frozen_r), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_k_Incompatible_checksums_s_vs_0x94, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x94), 0, 0, 1, 0}, + {&__pyx_n_s_MutableSequence, __pyx_k_MutableSequence, sizeof(__pyx_k_MutableSequence), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_aiohttp__frozenlist, __pyx_k_aiohttp__frozenlist, sizeof(__pyx_k_aiohttp__frozenlist), 0, 0, 1, 1}, + {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, + {&__pyx_n_s_count, __pyx_k_count, sizeof(__pyx_k_count), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, + {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, + {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, + {&__pyx_n_s_iter, 
__pyx_k_iter, sizeof(__pyx_k_iter), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_n_s_pos, __pyx_k_pos, sizeof(__pyx_k_pos), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_FrozenList, __pyx_k_pyx_unpickle_FrozenList, sizeof(__pyx_k_pyx_unpickle_FrozenList), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_register, __pyx_k_register, sizeof(__pyx_k_register), 0, 0, 1, 1}, + {&__pyx_n_s_remove, __pyx_k_remove, sizeof(__pyx_k_remove), 0, 0, 1, 1}, + {&__pyx_n_s_reversed, __pyx_k_reversed, sizeof(__pyx_k_reversed), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 19, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "aiohttp/_frozenlist.pyx":19 + * cdef object _check_frozen(self): + * if self.frozen: + * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< + * + * cdef inline object _fast_len(self): + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_Cannot_modify_frozen_list); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "(tree fragment)":1 + * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + __pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_FrozenList, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_3 = PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_5 = PyInt_FromLong(5); if (unlikely(!__pyx_int_5)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_155820355 = PyInt_FromLong(155820355L); if (unlikely(!__pyx_int_155820355)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList = &__pyx_vtable_7aiohttp_11_frozenlist_FrozenList; + __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._check_frozen = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen; + __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._fast_len = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len; + if (PyType_Ready(&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_print = 0; + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dictoffset && __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro == PyObject_GenericGetAttr)) { + 
__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dict, __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_FrozenList, (PyObject *)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + __pyx_ptype_7aiohttp_11_frozenlist_FrozenList = &__pyx_type_7aiohttp_11_frozenlist_FrozenList; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_frozenlist(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_frozenlist(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__frozenlist(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_frozenlist' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_frozenlist", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_aiohttp___frozenlist) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "aiohttp._frozenlist")) { + if (unlikely(PyDict_SetItemString(modules, "aiohttp._frozenlist", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "aiohttp/_frozenlist.pyx":1 + * from collections.abc import MutableSequence # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_MutableSequence); + __Pyx_GIVEREF(__pyx_n_s_MutableSequence); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_MutableSequence); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_MutableSequence, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_frozenlist.pyx":108 + * + * + * MutableSequence.register(FrozenList) # <<<<<<<<<<<<<< + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList, NULL, __pyx_n_s_aiohttp__frozenlist); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 
1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_FrozenList, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_frozenlist.pyx":1 + * from collections.abc import MutableSequence # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._frozenlist", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._frozenlist"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && 
_PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if 
(!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n 
= ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* ObjectGetItem */ +#if CYTHON_USE_TYPE_SLOTS +static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { + PyObject *runerr; + Py_ssize_t key_value; + PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; + if (unlikely(!(m && m->sq_item))) { + PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name); + return NULL; + } + key_value = __Pyx_PyIndex_AsSsize_t(index); + if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { + return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); + } + if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { + PyErr_Clear(); + PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name); + } + return NULL; +} +static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { + PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping; + if (likely(m && m->mp_subscript)) { + return m->mp_subscript(obj, key); + } + return __Pyx_PyObject_GetIndex(obj, key); +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* 
_PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyIntCompare */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED long inplace) { + if (op1 == op2) { + Py_RETURN_TRUE; + } + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + if (a == b) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + int unequal; + unsigned long uintval; + Py_ssize_t size = Py_SIZE(op1); + const digit* digits = ((PyLongObject*)op1)->ob_digit; + if (intval == 0) { + if (size == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } else if (intval < 0) { + if (size >= 0) + Py_RETURN_FALSE; + intval = -intval; + size = -size; + } else { + if (size <= 0) + Py_RETURN_FALSE; + } + uintval = (unsigned long) intval; +#if PyLong_SHIFT * 4 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 4)) { + unequal = (size != 5) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[4] != ((uintval >> (4 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 3 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 3)) { + unequal = (size != 4) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 2 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 2)) { + unequal = (size != 3) || 
(digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif +#if PyLong_SHIFT * 1 < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * 1)) { + unequal = (size != 2) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)); + } else +#endif + unequal = (size != 1) || (((unsigned long) digits[0]) != (uintval & (unsigned long) PyLong_MASK)); + if (unequal == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + if ((double)a == (double)b) Py_RETURN_TRUE; else Py_RETURN_FALSE; + } + return ( + PyObject_RichCompare(op1, op2, Py_EQ)); +} + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (descr != NULL) { + *method = 
descr;
+        return 0;
+    }
+    PyErr_Format(PyExc_AttributeError,
+#if PY_MAJOR_VERSION >= 3
+                 "'%.50s' object has no attribute '%U'",
+                 tp->tp_name, name);
+#else
+                 "'%.50s' object has no attribute '%.400s'",
+                 tp->tp_name, PyString_AS_STRING(name));
+#endif
+    return 0;
+#else
+    attr = __Pyx_PyObject_GetAttrStr(obj, name);
+    goto try_unpack;
+#endif
+try_unpack:
+#if CYTHON_UNPACK_METHODS
+    if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) {
+        PyObject *function = PyMethod_GET_FUNCTION(attr);
+        Py_INCREF(function);
+        Py_DECREF(attr);
+        *method = function;
+        return 1;
+    }
+#endif
+    *method = attr;
+    return 0;
+}
+
+/* PyObjectCallMethod1 */
+static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) {
+    PyObject *result = __Pyx_PyObject_CallOneArg(method, arg);
+    Py_DECREF(method);
+    return result;
+}
+static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
+    PyObject *method = NULL, *result;
+    int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method);
+    if (likely(is_method)) {
+        result = __Pyx_PyObject_Call2Args(method, obj, arg);
+        Py_DECREF(method);
+        return result;
+    }
+    if (unlikely(!method)) return NULL;
+    return __Pyx__PyObject_CallMethod1(method, arg);
+}
+
+/* pop_index */
+static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix) {
+    PyObject *r;
+    if (unlikely(!py_ix)) return NULL;
+    r = __Pyx__PyObject_PopIndex(L, py_ix);
+    Py_DECREF(py_ix);
+    return r;
+}
+static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix) {
+    return __Pyx_PyObject_CallMethod1(L, __pyx_n_s_pop, py_ix);
+}
+#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS
+static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix) {
+    Py_ssize_t size = PyList_GET_SIZE(L);
+    if (likely(size > (((PyListObject*)L)->allocated >> 1))) {
+        Py_ssize_t cix = ix;
+        if (cix < 0) {
+            cix += size;
+        }
+        if (likely(__Pyx_is_valid_index(cix, size))) {
+            PyObject* v = PyList_GET_ITEM(L, cix);
+            Py_SIZE(L) -= 1;
+            size -= 1;
+            memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*));
+            return v;
+        }
+    }
+    if (py_ix == Py_None) {
+        return __Pyx__PyObject_PopNewIndex(L, PyInt_FromSsize_t(ix));
+    } else {
+        return __Pyx__PyObject_PopIndex(L, py_ix);
+    }
+}
+#endif
+
+/* PyErrExceptionMatches */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+    PyObject *exc_type = tstate->curexc_type;
+    if (exc_type == err) return 1;
+    if (unlikely(!exc_type)) return 0;
+    if (unlikely(PyTuple_Check(err)))
+        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+}
+#endif
+
+/* GetAttr */
+static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
+#if CYTHON_USE_TYPE_SLOTS
+#if PY_MAJOR_VERSION >= 3
+    if (likely(PyUnicode_Check(n)))
+#else
+    if (likely(PyString_Check(n)))
+#endif
+        return __Pyx_PyObject_GetAttrStr(o, n);
+#endif
+    return PyObject_GetAttr(o, n);
+}
+
+/* GetAttr3 */
+static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
+    __Pyx_PyThreadState_declare
+    __Pyx_PyThreadState_assign
+    if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
+        return NULL;
+    __Pyx_PyErr_Clear();
+    Py_INCREF(d);
+    return d;
+}
+static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
+    PyObject *r = __Pyx_GetAttr(o, 
n); + return (likely(r)) ? r : __Pyx_GetAttr3Default(d); +} + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 
0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetVTable */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if 
(unlikely(!reduce)) goto BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto GOOD; +BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + 
else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if 
(sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, 
PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int 
result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + 
} + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; ip) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int 
__Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..6ad3303 Binary files /dev/null and b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.cpython-37m-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.pyx b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.pyx new file mode 100644 index 0000000..b130577 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_frozenlist.pyx @@ -0,0 +1,108 @@ +from collections.abc import MutableSequence + + +cdef class FrozenList: + + cdef readonly bint frozen + cdef list _items + + def __init__(self, items=None): + self.frozen = False + if items is not None: + items = list(items) + else: + items = [] + self._items = items + + cdef object _check_frozen(self): + if self.frozen: + raise RuntimeError("Cannot modify frozen list.") + + cdef inline object _fast_len(self): + return len(self._items) + + def freeze(self): + self.frozen = True + + def __getitem__(self, index): + return self._items[index] + + def __setitem__(self, index, value): + self._check_frozen() + self._items[index] = value + + def __delitem__(self, index): + self._check_frozen() + del self._items[index] + + def __len__(self): + return self._fast_len() + + def __iter__(self): + return self._items.__iter__() + + def __reversed__(self): + return self._items.__reversed__() + + def __richcmp__(self, other, op): + if op == 0: # < + return list(self) < other + if op == 1: # <= + return list(self) <= other + if op == 2: # == + return list(self) == other + if op == 3: # != + return list(self) != other + if op == 4: # > + return list(self) > other + if op == 5: # => + return list(self) >= other + + def 
insert(self, pos, item):
+        self._check_frozen()
+        self._items.insert(pos, item)
+
+    def __contains__(self, item):
+        return item in self._items
+
+    def __iadd__(self, items):
+        self._check_frozen()
+        self._items += list(items)
+        return self
+
+    def index(self, item):
+        return self._items.index(item)
+
+    def remove(self, item):
+        self._check_frozen()
+        self._items.remove(item)
+
+    def clear(self):
+        self._check_frozen()
+        self._items.clear()
+
+    def extend(self, items):
+        self._check_frozen()
+        self._items += list(items)
+
+    def reverse(self):
+        self._check_frozen()
+        self._items.reverse()
+
+    def pop(self, index=-1):
+        self._check_frozen()
+        return self._items.pop(index)
+
+    def append(self, item):
+        self._check_frozen()
+        return self._items.append(item)
+
+    def count(self, item):
+        return self._items.count(item)
+
+    def __repr__(self):
+        return '<FrozenList(frozen={}, items={})>'.format(self.frozen,
+                                                          self._items)
+
+
+MutableSequence.register(FrozenList)
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_headers.pxi b/venv/lib/python3.7/site-packages/aiohttp/_headers.pxi
new file mode 100644
index 0000000..22ef15c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_headers.pxi
@@ -0,0 +1,84 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to update it after the origin changing.
+
+from . import hdrs
+cdef tuple headers = (
+    hdrs.ACCEPT,
+    hdrs.ACCEPT_CHARSET,
+    hdrs.ACCEPT_ENCODING,
+    hdrs.ACCEPT_LANGUAGE,
+    hdrs.ACCEPT_RANGES,
+    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+    hdrs.ACCESS_CONTROL_MAX_AGE,
+    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+    hdrs.AGE,
+    hdrs.ALLOW,
+    hdrs.AUTHORIZATION,
+    hdrs.CACHE_CONTROL,
+    hdrs.CONNECTION,
+    hdrs.CONTENT_DISPOSITION,
+    hdrs.CONTENT_ENCODING,
+    hdrs.CONTENT_LANGUAGE,
+    hdrs.CONTENT_LENGTH,
+    hdrs.CONTENT_LOCATION,
+    hdrs.CONTENT_MD5,
+    hdrs.CONTENT_RANGE,
+    hdrs.CONTENT_TRANSFER_ENCODING,
+    hdrs.CONTENT_TYPE,
+    hdrs.COOKIE,
+    hdrs.DATE,
+    hdrs.DESTINATION,
+    hdrs.DIGEST,
+    hdrs.ETAG,
+    hdrs.EXPECT,
+    hdrs.EXPIRES,
+    hdrs.FORWARDED,
+    hdrs.FROM,
+    hdrs.HOST,
+    hdrs.IF_MATCH,
+    hdrs.IF_MODIFIED_SINCE,
+    hdrs.IF_NONE_MATCH,
+    hdrs.IF_RANGE,
+    hdrs.IF_UNMODIFIED_SINCE,
+    hdrs.KEEP_ALIVE,
+    hdrs.LAST_EVENT_ID,
+    hdrs.LAST_MODIFIED,
+    hdrs.LINK,
+    hdrs.LOCATION,
+    hdrs.MAX_FORWARDS,
+    hdrs.ORIGIN,
+    hdrs.PRAGMA,
+    hdrs.PROXY_AUTHENTICATE,
+    hdrs.PROXY_AUTHORIZATION,
+    hdrs.RANGE,
+    hdrs.REFERER,
+    hdrs.RETRY_AFTER,
+    hdrs.SEC_WEBSOCKET_ACCEPT,
+    hdrs.SEC_WEBSOCKET_EXTENSIONS,
+    hdrs.SEC_WEBSOCKET_KEY,
+    hdrs.SEC_WEBSOCKET_KEY1,
+    hdrs.SEC_WEBSOCKET_PROTOCOL,
+    hdrs.SEC_WEBSOCKET_VERSION,
+    hdrs.SERVER,
+    hdrs.SET_COOKIE,
+    hdrs.TE,
+    hdrs.TRAILER,
+    hdrs.TRANSFER_ENCODING,
+    hdrs.UPGRADE,
+    hdrs.URI,
+    hdrs.USER_AGENT,
+    hdrs.VARY,
+    hdrs.VIA,
+    hdrs.WANT_DIGEST,
+    hdrs.WARNING,
+    hdrs.WEBSOCKET,
+    hdrs.WWW_AUTHENTICATE,
+    hdrs.X_FORWARDED_FOR,
+    hdrs.X_FORWARDED_HOST,
+    hdrs.X_FORWARDED_PROTO,
+)
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_helpers.c b/venv/lib/python3.7/site-packages/aiohttp/_helpers.c
new file mode 100644
index 0000000..b30cdf2
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_helpers.c
@@ -0,0 +1,5312 @@
+/* Generated by Cython 0.29.2 */
+
+/* BEGIN: Cython Metadata
+{
+    "distutils": {
+        "name": "aiohttp._helpers",
+        "sources": [
+            "aiohttp/_helpers.pyx"
+        ]
+    },
+    "module_name": "aiohttp._helpers"
+}
+END: Cython Metadata */
+
[aiohttp._helpers — Cython-generated C source (ABI "0_29_2", i.e. Cython 0.29.2) compiled from aiohttp/_helpers.pyx, included in this diff as one large run of '+' lines. In outline, the generated file contains:

  - the standard Cython compatibility preamble: the CYTHON_* feature macros with their CPython / PyPy / Pyston switches, PEP 393 unicode shims, METH_FASTCALL and thread-local-storage fallbacks, the string/encoding conversion helpers, and the likely()/unlikely() branch hints for GCC > 2.95;

  - the module declarations for aiohttp._helpers: the __pyx_obj_7aiohttp_8_helpers_reify struct (PyObject_HEAD plus the `wrapped` and `name` slots), the interned string constants, and prototypes for the runtime support code (refnanny, argument parsing, exception save/reset, the code-object/traceback cache, and pickle support keyed on checksum 0x770cb8f);

  - the generated method bodies for the reify descriptor: __init__, the __doc__ property, __get__, __set__ (which raises AttributeError("reified property is read-only")), and the __reduce_cython__ / __setstate_cython__ / __pyx_unpickle_reify pickling helpers, each preceded by the matching aiohttp/_helpers.pyx source lines quoted as comments.]
__PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.name is not None or self.wrapped is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.name, self.wrapped) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.name is not None or self.wrapped is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state + */ + /*else*/ { + __pyx_t_2 = (__pyx_v_self->name != Py_None); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->wrapped != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.name is not None or self.wrapped is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self.name is not None or self.wrapped is not None + * if use_setstate: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_reify); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_124832655); + __Pyx_GIVEREF(__pyx_int_124832655); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_124832655); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.name is not None or self.wrapped is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state + * else: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_reify__set_state(self, 
__pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_reify); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_124832655); + __Pyx_GIVEREF(__pyx_int_124832655); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_124832655); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_6 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._helpers.reify.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_reify__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_reify__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_reify__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._helpers.reify.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_8_helpers_1__pyx_unpickle_reify = {"__pyx_unpickle_reify", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_reify (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_reify") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto 
__pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + __Pyx_RefNannySetupContext("__pyx_unpickle_reify", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x770cb8f) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum != 0x770cb8f: + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + * __pyx_result = reify.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v___pyx_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum != 0x770cb8f: + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = reify.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + 
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + * __pyx_result = reify.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_8_helpers_reify), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v___pyx_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + * __pyx_result = reify.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_6 = (__pyx_t_1 != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":9 + * __pyx_result = reify.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_3 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) + * __pyx_result = reify.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v___pyx_result, PyObject 
*__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_reify__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->name); + __Pyx_DECREF(__pyx_v___pyx_result->name); + __pyx_v___pyx_result->name = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->wrapped); + __Pyx_DECREF(__pyx_v___pyx_result->wrapped); + __pyx_v___pyx_result->wrapped = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 2) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, 
__pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_tp_new_7aiohttp_8_helpers_reify(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_7aiohttp_8_helpers_reify *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_7aiohttp_8_helpers_reify *)o); + p->wrapped = Py_None; Py_INCREF(Py_None); + p->name = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_8_helpers_reify(PyObject *o) { + struct __pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->wrapped); + Py_CLEAR(p->name); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_7aiohttp_8_helpers_reify(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; + if (p->wrapped) { + e = (*v)(p->wrapped, a); if (e) return e; + } + if (p->name) { + e = (*v)(p->name, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_8_helpers_reify(PyObject *o) { + PyObject* tmp; + struct 
__pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; + tmp = ((PyObject*)p->wrapped); + p->wrapped = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->name); + p->name = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_tp_descr_get_7aiohttp_8_helpers_reify(PyObject *o, PyObject *i, PyObject *c) { + PyObject *r = 0; + if (!i) i = Py_None; + if (!c) c = Py_None; + r = __pyx_pw_7aiohttp_8_helpers_5reify_3__get__(o, i, c); + return r; +} + +static int __pyx_tp_descr_set_7aiohttp_8_helpers_reify(PyObject *o, PyObject *i, PyObject *v) { + if (v) { + return __pyx_pw_7aiohttp_8_helpers_5reify_5__set__(o, i, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__delete__"); + return -1; + } +} + +static PyObject *__pyx_getprop_7aiohttp_8_helpers_5reify___doc__(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_8_helpers_5reify_7__doc___1__get__(o); +} + +static PyMethodDef __pyx_methods_7aiohttp_8_helpers_reify[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_8_helpers_5reify_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_7aiohttp_8_helpers_reify[] = { + {(char *)"__doc__", __pyx_getprop_7aiohttp_8_helpers_5reify___doc__, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._helpers.reify", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_8_helpers_reify), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_8_helpers_reify, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + "Use as a class method decorator. It operates almost exactly like\n the Python `@property` decorator, but it puts the result of the\n method it decorates into the instance dict after the first call,\n effectively replacing the function it decorates with an instance\n variable. 
It is, in Python parlance, a data descriptor.\n\n ", /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_8_helpers_reify, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_8_helpers_reify, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_8_helpers_reify, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_7aiohttp_8_helpers_reify, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + __pyx_tp_descr_get_7aiohttp_8_helpers_reify, /*tp_descr_get*/ + __pyx_tp_descr_set_7aiohttp_8_helpers_reify, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_8_helpers_5reify_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_8_helpers_reify, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__helpers(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__helpers}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "_helpers", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_k_Incompatible_checksums_s_vs_0x77, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x77), 0, 0, 1, 0}, + {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_aiohttp__helpers, __pyx_k_aiohttp__helpers, sizeof(__pyx_k_aiohttp__helpers), 0, 0, 1, 1}, + {&__pyx_n_s_cache, __pyx_k_cache, sizeof(__pyx_k_cache), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + 
{&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_reify, __pyx_k_pyx_unpickle_reify, sizeof(__pyx_k_pyx_unpickle_reify), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_kp_s_reified_property_is_read_only, __pyx_k_reified_property_is_read_only, sizeof(__pyx_k_reified_property_is_read_only), 0, 0, 1, 0}, + {&__pyx_n_s_reify, __pyx_k_reify, sizeof(__pyx_k_reify), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_wrapped, __pyx_k_wrapped, sizeof(__pyx_k_wrapped), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 25, __pyx_L1_error) + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 29, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "aiohttp/_helpers.pyx":35 + * + * def __set__(self, inst, value): + * raise AttributeError("reified property is read-only") # <<<<<<<<<<<<<< + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_reified_property_is_read_only); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "(tree fragment)":1 + * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + __pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_reify, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_124832655 = PyInt_FromLong(124832655L); if (unlikely(!__pyx_int_124832655)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + 
__pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_type_7aiohttp_8_helpers_reify.tp_print = 0; + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_8_helpers_reify.tp_dictoffset && __pyx_type_7aiohttp_8_helpers_reify.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_7aiohttp_8_helpers_reify.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_reify, (PyObject *)&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_ptype_7aiohttp_8_helpers_reify = &__pyx_type_7aiohttp_8_helpers_reify; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_helpers(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_helpers(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__helpers(void) 
CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__helpers(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__helpers(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_helpers' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__helpers(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_helpers", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_aiohttp___helpers) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "aiohttp._helpers")) { + if (unlikely(PyDict_SetItemString(modules, "aiohttp._helpers", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "(tree fragment)":1 + * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_8_helpers_1__pyx_unpickle_reify, NULL, __pyx_n_s_aiohttp__helpers); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_reify, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_helpers.pyx":1 + * cdef class reify: # <<<<<<<<<<<<<< + * """Use as a class method decorator. It operates almost exactly like + * the Python `@property` decorator, but it puts the result of the + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._helpers", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._helpers"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* ObjectGetItem */ +#if CYTHON_USE_TYPE_SLOTS +static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { + PyObject *runerr; + Py_ssize_t key_value; + PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; + if (unlikely(!(m && m->sq_item))) { + PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name); + return NULL; + } + key_value = __Pyx_PyIndex_AsSsize_t(index); + if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { + return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); + } + if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { + PyErr_Clear(); + PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name); + } + return NULL; +} +static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { + PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping; + if (likely(m && m->mp_subscript)) { + return m->mp_subscript(obj, key); + } + return __Pyx_PyObject_GetIndex(obj, key); +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = 
PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate =
__Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return 
result; +} +#endif + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void 
__Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, 
tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ? r : __Pyx_GetAttr3Default(d); +} + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if 
PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#else + 
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto GOOD; +BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char 
*)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + 
if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = 
__Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } 
else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { 
+ int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; ip) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + 
char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_helpers.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/aiohttp/_helpers.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..7ca8d5d Binary files /dev/null and b/venv/lib/python3.7/site-packages/aiohttp/_helpers.cpython-37m-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyi b/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyi new file mode 100644 index 0000000..59608e1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyi @@ -0,0 +1,8 @@ +from typing import Any + +class reify: + def __init__(self, wrapped: Any) -> None: ... + + def __get__(self, inst: Any, owner: Any) -> Any: ... + + def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyx b/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyx new file mode 100644 index 0000000..665f367 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_helpers.pyx @@ -0,0 +1,35 @@ +cdef class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. + + """ + + cdef object wrapped + cdef object name + + def __init__(self, wrapped): + self.wrapped = wrapped + self.name = wrapped.__name__ + + @property + def __doc__(self): + return self.wrapped.__doc__ + + def __get__(self, inst, owner): + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst, value): + raise AttributeError("reified property is read-only") diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_parser.c b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.c new file mode 100644 index 0000000..4e69e79 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.c @@ -0,0 +1,24057 @@ +/* Generated by Cython 0.29.2 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "define_macros": [ + [ + "HTTP_PARSER_STRICT", + 0 + ] + ], + "depends": [], + "name": "aiohttp._http_parser", + "sources": [ + "aiohttp/_http_parser.pyx", + "vendor/http-parser/http_parser.c", + "aiohttp/_find_header.c" + ] + }, + "module_name": "aiohttp._http_parser" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else
+#define CYTHON_ABI "0_29_2"
+#define CYTHON_HEX_VERSION 0x001D02F0
+#define CYTHON_FUTURE_DIVISION 1
+#include <stddef.h>
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+ #ifndef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
+ #endif
+ #ifndef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+ #ifdef SIZEOF_VOID_P
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+ #endif
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include <stdint.h>
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#ifndef METH_STACKLESS
+ #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_USE_DICT_VERSIONS
+#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
+ (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ + } +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__aiohttp___http_parser +#define __PYX_HAVE_API__aiohttp___http_parser +/* Early includes */ +#include +#include +#include "pythread.h" +#include +#include "../vendor/http-parser/http_parser.h" +#include "_find_header.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ 
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "aiohttp/_http_parser.pyx", + "stringsource", + "type.pxd", + "bool.pxd", + "complex.pxd", + "aiohttp/_headers.pxi", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage; +struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage; +struct __pyx_obj_7aiohttp_12_http_parser_HttpParser; +struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser; +struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser; +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__; +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr; +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__; +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr; +struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init; + +/* "aiohttp/_http_parser.pyx":308 + * PyMem_Free(self._csettings) + * + * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< + * object protocol, object loop, object timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + */ +struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init { + int __pyx_n; + PyObject *timer; + size_t max_line_size; + size_t max_headers; + size_t max_field_size; + PyObject *payload_exception; + int response_with_body; + int auto_decompress; +}; + +/* "aiohttp/_http_parser.pyx":93 + * + * @cython.freelist(DEFAULT_FREELIST_SIZE) + * cdef class RawRequestMessage: # <<<<<<<<<<<<<< + * cdef readonly str method + * cdef readonly str path + */ +struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage { + PyObject_HEAD + PyObject *method; + PyObject *path; + PyObject *version; + PyObject *headers; + PyObject *raw_headers; + PyObject *should_close; + PyObject *compression; + PyObject *upgrade; + PyObject *chunked; + PyObject *url; +}; + + +/* "aiohttp/_http_parser.pyx":193 + * + * @cython.freelist(DEFAULT_FREELIST_SIZE) + * cdef class RawResponseMessage: # <<<<<<<<<<<<<< + * cdef readonly object version # HttpVersion + * cdef readonly int code + */ +struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage { + PyObject_HEAD + PyObject *version; + int code; + PyObject *reason; + PyObject *headers; + PyObject *raw_headers; + PyObject *should_close; + PyObject *compression; + PyObject *upgrade; + PyObject *chunked; +}; + + +/* "aiohttp/_http_parser.pyx":255 + * + * @cython.internal + * cdef class HttpParser: # <<<<<<<<<<<<<< + * + * cdef: + */ +struct __pyx_obj_7aiohttp_12_http_parser_HttpParser { + PyObject_HEAD + struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtab; + struct http_parser *_cparser; + struct http_parser_settings *_csettings; + PyObject *_raw_name; + PyObject *_raw_value; + int _has_value; + PyObject *_protocol; + PyObject *_loop; + PyObject *_timer; + size_t _max_line_size; + size_t _max_field_size; + size_t _max_headers; + int _response_with_body; + int _started; + PyObject *_url; + PyObject *_buf; + PyObject *_path; + PyObject *_reason; + PyObject *_headers; + PyObject *_raw_headers; + int _upgraded; + 
PyObject *_messages; + PyObject *_payload; + int _payload_error; + PyObject *_payload_exception; + PyObject *_last_error; + int _auto_decompress; + PyObject *_content_encoding; + Py_buffer py_buf; +}; + + +/* "aiohttp/_http_parser.pyx":537 + * + * + * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol, loop, timer=None, + */ +struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; +}; + + +/* "aiohttp/_http_parser.pyx":564 + * + * + * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol, loop, timer=None, + */ +struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; +}; + + +/* "aiohttp/_http_parser.pyx":118 + * self.url = url + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("method", self.method)) + */ +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ { + PyObject_HEAD + PyObject *__pyx_v_info; +}; + + +/* "aiohttp/_http_parser.pyx":130 + * info.append(("chunked", self.chunked)) + * info.append(("url", self.url)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr { + PyObject_HEAD + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_outer_scope; + PyObject *__pyx_v_name; + PyObject *__pyx_v_val; +}; + + +/* "aiohttp/_http_parser.pyx":216 + * self.chunked = chunked + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("version", self.version)) + */ +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ { + PyObject_HEAD + PyObject *__pyx_v_info; +}; + + +/* "aiohttp/_http_parser.pyx":227 + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ +struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr { + PyObject_HEAD + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *__pyx_outer_scope; + PyObject *__pyx_v_name; + PyObject *__pyx_v_val; +}; + + + +/* "aiohttp/_http_parser.pyx":255 + * + * @cython.internal + * cdef class HttpParser: # <<<<<<<<<<<<<< + * + * cdef: + */ + +struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser { + PyObject *(*_init)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum http_parser_type, PyObject *, PyObject *, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); + PyObject *(*_process_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*_on_header_field)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t); + PyObject *(*_on_header_value)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t); + PyObject *(*_on_headers_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*_on_message_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*_on_chunk_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*_on_chunk_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*_on_status_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + PyObject *(*http_version)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); +}; 
+static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); + + +/* "aiohttp/_http_parser.pyx":537 + * + * + * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol, loop, timer=None, + */ + +struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser { + struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; +}; +static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser; + + +/* "aiohttp/_http_parser.pyx":564 + * + * + * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol, loop, timer=None, + */ + +struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser { + struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; +}; +static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define 
__Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* decode_c_string_utf16.proto */ +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 0; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = -1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} + +/* decode_c_bytes.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( + const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); + +/* decode_bytes.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_bytes( + PyObject* string, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + return __Pyx_decode_c_bytes( + PyBytes_AS_STRING(string), PyBytes_GET_SIZE(string), + start, stop, encoding, errors, decode_func); +} + +/* 
RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* None.proto */ +static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* ListCompAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len)) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) +#endif + +/* ListAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* SliceObject.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( + PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** py_start, PyObject** py_stop, PyObject** py_slice, + int has_cstart, int has_cstop, int wraparound); + +/* decode_bytearray.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_bytearray( + PyObject* string, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + return __Pyx_decode_c_bytes( + PyByteArray_AS_STRING(string), PyByteArray_GET_SIZE(string), + start, stop, encoding, errors, decode_func); +} + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* decode_c_string.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_string( + const char* cstring, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* 
Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE enum http_method __Pyx_PyInt_As_enum__http_method(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int 
__Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* FetchCommonType.proto */ +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod1.proto */ +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); + +/* CoroutineBase.proto */ +typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *); +#if CYTHON_USE_EXC_INFO_STACK +#define __Pyx_ExcInfoStruct _PyErr_StackItem +#else +typedef struct { + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; +} __Pyx_ExcInfoStruct; +#endif +typedef struct { + PyObject_HEAD + __pyx_coroutine_body_t body; + PyObject *closure; + __Pyx_ExcInfoStruct gi_exc_state; + PyObject *gi_weakreflist; + PyObject *classobj; + PyObject *yieldfrom; + PyObject *gi_name; + PyObject *gi_qualname; + PyObject *gi_modulename; + PyObject *gi_code; + int resume_label; + char is_running; +} __pyx_CoroutineObject; +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); +static CYTHON_INLINE void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *self); +static int __Pyx_Coroutine_clear(PyObject *self); +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); +static PyObject *__Pyx_Coroutine_Close(PyObject *self); +static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); +#if CYTHON_USE_EXC_INFO_STACK +#define __Pyx_Coroutine_SwapException(self) +#define __Pyx_Coroutine_ResetAndClearException(self) __Pyx_Coroutine_ExceptionClear(&(self)->gi_exc_state) +#else +#define __Pyx_Coroutine_SwapException(self) {\ + __Pyx_ExceptionSwap(&(self)->gi_exc_state.exc_type, &(self)->gi_exc_state.exc_value, &(self)->gi_exc_state.exc_traceback);\ + __Pyx_Coroutine_ResetFrameBackpointer(&(self)->gi_exc_state);\ + } +#define __Pyx_Coroutine_ResetAndClearException(self) {\ + __Pyx_ExceptionReset((self)->gi_exc_state.exc_type, (self)->gi_exc_state.exc_value, (self)->gi_exc_state.exc_traceback);\ + (self)->gi_exc_state.exc_type = (self)->gi_exc_state.exc_value = (self)->gi_exc_state.exc_traceback = NULL;\ + } +#endif +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ + __Pyx_PyGen__FetchStopIterationValue(__pyx_tstate, pvalue) +#else +#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue) +#endif +static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); +static CYTHON_INLINE void 
__Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state); + +/* PatchModuleWithCoroutine.proto */ +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); + +/* PatchGeneratorABC.proto */ +static int __Pyx_patch_abc(void); + +/* Generator.proto */ +#define __Pyx_Generator_USED +static PyTypeObject *__pyx_GeneratorType = 0; +#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType) +#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name)\ + __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name) +static PyObject *__Pyx_Generator_Next(PyObject *self); +static int __pyx_Generator_init(void); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self); /* proto*/ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'cpython.version' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' 
*/ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.module' */ + +/* Module declarations from 'cpython.tuple' */ + +/* Module declarations from 'cpython.list' */ + +/* Module declarations from 'cpython.sequence' */ + +/* Module declarations from 'cpython.mapping' */ + +/* Module declarations from 'cpython.iterator' */ + +/* Module declarations from 'cpython.number' */ + +/* Module declarations from 'cpython.int' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.bool' */ +static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; + +/* Module declarations from 'cpython.long' */ + +/* Module declarations from 'cpython.float' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.complex' */ +static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; + +/* Module declarations from 'cpython.string' */ + +/* Module declarations from 'cpython.unicode' */ + +/* Module declarations from 'cpython.dict' */ + +/* Module declarations from 'cpython.instance' */ + +/* Module declarations from 'cpython.function' */ + +/* Module declarations from 'cpython.method' */ + +/* Module declarations from 'cpython.weakref' */ + +/* Module declarations from 'cpython.getargs' */ + +/* Module declarations from 'cpython.pythread' */ + +/* Module declarations from 'cpython.pystate' */ + +/* Module declarations from 'cpython.cobject' */ + +/* Module declarations from 'cpython.oldbuffer' */ + +/* Module declarations from 'cpython.set' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'cpython.pycapsule' */ + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'aiohttp' */ + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'aiohttp._cparser' */ + +/* Module declarations from 'aiohttp._find_header' */ + +/* Module declarations from 'aiohttp._http_parser' */ +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpParser = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = 0; +static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_headers = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_URL = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_URL_build = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_CIMultiDict = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion10 = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion11 = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1 = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING = 0; +static PyObject 
*__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_StreamReader = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser_DeflateBuffer = 0; +static PyObject *__pyx_v_7aiohttp_12_http_parser__http_method = 0; +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject *, char const *, size_t); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(int); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObject *); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, int, PyObject *, int, int, PyObject *); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject *, int, PyObject *, PyObject *, PyObject *, int, PyObject *, int, int); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct http_parser *); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *, char const *, size_t); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *, char const *, size_t); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *, char const *, size_t); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *, char const *, size_t); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser *, char const *, size_t); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *); /*proto*/ +static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser__parse_url(char *, size_t); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *, PyObject *); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *, PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "aiohttp._http_parser" +extern int __pyx_module_is_main_aiohttp___http_parser; +int __pyx_module_is_main_aiohttp___http_parser = 0; + +/* Implementation of 'aiohttp._http_parser' */ +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_MemoryError; +static PyObject *__pyx_builtin_TypeError; +static PyObject *__pyx_builtin_BaseException; +static const char __pyx_k_[] = "="; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_TE[] = "TE"; +static const char __pyx_k__2[] = ", "; +static const char __pyx_k__3[] = ")>"; +static const char __pyx_k__4[] = ""; +static const char __pyx_k_br[] = "br"; +static const char __pyx_k_AGE[] = "AGE"; +static const char __pyx_k_URI[] = "URI"; +static const char __pyx_k_URL[] = "URL"; +static const char __pyx_k_VIA[] = "VIA"; +static const char __pyx_k__11[] = ":"; +static const char __pyx_k_add[] = "add"; +static const char __pyx_k_all[] = "__all__"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_url[] = 
"url"; +static const char __pyx_k_DATE[] = "DATE"; +static const char __pyx_k_ETAG[] = "ETAG"; +static const char __pyx_k_FROM[] = "FROM"; +static const char __pyx_k_HOST[] = "HOST"; +static const char __pyx_k_LINK[] = "LINK"; +static const char __pyx_k_VARY[] = "VARY"; +static const char __pyx_k_args[] = "args"; +static const char __pyx_k_code[] = "code"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_gzip[] = "gzip"; +static const char __pyx_k_hdrs[] = "hdrs"; +static const char __pyx_k_host[] = "host"; +static const char __pyx_k_loop[] = "loop"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_path[] = "path"; +static const char __pyx_k_port[] = "port"; +static const char __pyx_k_send[] = "send"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_user[] = "user"; +static const char __pyx_k_yarl[] = "yarl"; +static const char __pyx_k_ALLOW[] = "ALLOW"; +static const char __pyx_k_RANGE[] = "RANGE"; +static const char __pyx_k_URL_2[] = "_URL"; +static const char __pyx_k_build[] = "build"; +static const char __pyx_k_close[] = "close"; +static const char __pyx_k_lower[] = "lower"; +static const char __pyx_k_query[] = "query"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_throw[] = "throw"; +static const char __pyx_k_timer[] = "timer"; +static const char __pyx_k_ACCEPT[] = "ACCEPT"; +static const char __pyx_k_COOKIE[] = "COOKIE"; +static const char __pyx_k_DIGEST[] = "DIGEST"; +static const char __pyx_k_EXPECT[] = "EXPECT"; +static const char __pyx_k_ORIGIN[] = "ORIGIN"; +static const char __pyx_k_PRAGMA[] = "PRAGMA"; +static const char __pyx_k_SERVER[] = "SERVER"; +static const char __pyx_k_format[] = "format"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_method[] = "method"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_py_buf[] = "py_buf"; +static const char __pyx_k_reason[] = "reason"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_scheme[] = "scheme"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_EXPIRES[] = "EXPIRES"; +static const char __pyx_k_REFERER[] = "REFERER"; +static const char __pyx_k_TRAILER[] = "TRAILER"; +static const char __pyx_k_UPGRADE[] = "UPGRADE"; +static const char __pyx_k_WARNING[] = "WARNING"; +static const char __pyx_k_aiohttp[] = "aiohttp"; +static const char __pyx_k_chunked[] = "chunked"; +static const char __pyx_k_deflate[] = "deflate"; +static const char __pyx_k_genexpr[] = "genexpr"; +static const char __pyx_k_headers[] = "headers"; +static const char __pyx_k_streams[] = "streams"; +static const char __pyx_k_unknown[] = ""; +static const char __pyx_k_upgrade[] = "upgrade"; +static const char __pyx_k_version[] = "version"; +static const char __pyx_k_IF_MATCH[] = "IF_MATCH"; +static const char __pyx_k_IF_RANGE[] = "IF_RANGE"; +static const char __pyx_k_LOCATION[] = "LOCATION"; +static const char __pyx_k_buf_data[] = "buf_data"; +static const char __pyx_k_feed_eof[] = "feed_eof"; +static const char __pyx_k_fragment[] = "fragment"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_password[] = "password"; +static const char __pyx_k_protocol[] = "protocol"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_FORWARDED[] = "FORWARDED"; +static const char __pyx_k_TypeError[] = 
"TypeError"; +static const char __pyx_k_WEBSOCKET[] = "WEBSOCKET"; +static const char __pyx_k_feed_data[] = "feed_data"; +static const char __pyx_k_multidict[] = "multidict"; +static const char __pyx_k_parse_url[] = "parse_url"; +static const char __pyx_k_partition[] = "partition"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_CONNECTION[] = "CONNECTION"; +static const char __pyx_k_KEEP_ALIVE[] = "KEEP_ALIVE"; +static const char __pyx_k_SET_COOKIE[] = "SET_COOKIE"; +static const char __pyx_k_USER_AGENT[] = "USER_AGENT"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_CIMultiDict[] = "CIMultiDict"; +static const char __pyx_k_CONTENT_MD5[] = "CONTENT_MD5"; +static const char __pyx_k_DESTINATION[] = "DESTINATION"; +static const char __pyx_k_HttpVersion[] = "HttpVersion"; +static const char __pyx_k_LineTooLong[] = "LineTooLong"; +static const char __pyx_k_MemoryError[] = "MemoryError"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_RETRY_AFTER[] = "RETRY_AFTER"; +static const char __pyx_k_WANT_DIGEST[] = "WANT_DIGEST"; +static const char __pyx_k_compression[] = "compression"; +static const char __pyx_k_http_parser[] = "http_parser"; +static const char __pyx_k_http_writer[] = "http_writer"; +static const char __pyx_k_max_headers[] = "max_headers"; +static const char __pyx_k_raw_headers[] = "raw_headers"; +static const char __pyx_k_CONTENT_TYPE[] = "CONTENT_TYPE"; +static const char __pyx_k_MAX_FORWARDS[] = "MAX_FORWARDS"; +static const char __pyx_k_StreamReader[] = "StreamReader"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_should_close[] = "should_close"; +static const char __pyx_k_stringsource[] = "stringsource"; +static const char __pyx_k_ACCEPT_RANGES[] = "ACCEPT_RANGES"; +static const char __pyx_k_AUTHORIZATION[] = "AUTHORIZATION"; +static const char __pyx_k_BadStatusLine[] = "BadStatusLine"; +static const char __pyx_k_BaseException[] = "BaseException"; +static const char __pyx_k_CACHE_CONTROL[] = "CACHE_CONTROL"; +static const char __pyx_k_CIMultiDict_2[] = "_CIMultiDict"; +static const char __pyx_k_CONTENT_RANGE[] = "CONTENT_RANGE"; +static const char __pyx_k_DeflateBuffer[] = "DeflateBuffer"; +static const char __pyx_k_EMPTY_PAYLOAD[] = "EMPTY_PAYLOAD"; +static const char __pyx_k_HttpVersion10[] = "HttpVersion10"; +static const char __pyx_k_HttpVersion11[] = "HttpVersion11"; +static const char __pyx_k_HttpVersion_2[] = "_HttpVersion"; +static const char __pyx_k_IF_NONE_MATCH[] = "IF_NONE_MATCH"; +static const char __pyx_k_InvalidHeader[] = "InvalidHeader"; +static const char __pyx_k_LAST_EVENT_ID[] = "LAST_EVENT_ID"; +static const char __pyx_k_LAST_MODIFIED[] = "LAST_MODIFIED"; +static const char __pyx_k_invalid_url_r[] = "invalid url {!r}"; +static const char __pyx_k_max_line_size[] = "max_line_size"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_set_exception[] = "set_exception"; +static const char __pyx_k_ACCEPT_CHARSET[] = "ACCEPT_CHARSET"; +static const char __pyx_k_BadHttpMessage[] = "BadHttpMessage"; +static const char __pyx_k_CONTENT_LENGTH[] = "CONTENT_LENGTH"; +static const char __pyx_k_StreamReader_2[] = "_StreamReader"; +static const char __pyx_k_max_field_size[] = "max_field_size"; +static const char __pyx_k_read_until_eof[] = "read_until_eof"; +static const 
char __pyx_k_ACCEPT_ENCODING[] = "ACCEPT_ENCODING"; +static const char __pyx_k_ACCEPT_LANGUAGE[] = "ACCEPT_LANGUAGE"; +static const char __pyx_k_DeflateBuffer_2[] = "_DeflateBuffer"; +static const char __pyx_k_EMPTY_PAYLOAD_2[] = "_EMPTY_PAYLOAD"; +static const char __pyx_k_HttpVersion10_2[] = "_HttpVersion10"; +static const char __pyx_k_HttpVersion11_2[] = "_HttpVersion11"; +static const char __pyx_k_InvalidURLError[] = "InvalidURLError"; +static const char __pyx_k_X_FORWARDED_FOR[] = "X_FORWARDED_FOR"; +static const char __pyx_k_auto_decompress[] = "auto_decompress"; +static const char __pyx_k_http_exceptions[] = "http_exceptions"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_CIMultiDictProxy[] = "CIMultiDictProxy"; +static const char __pyx_k_CONTENT_ENCODING[] = "CONTENT_ENCODING"; +static const char __pyx_k_CONTENT_LANGUAGE[] = "CONTENT_LANGUAGE"; +static const char __pyx_k_CONTENT_LOCATION[] = "CONTENT_LOCATION"; +static const char __pyx_k_WWW_AUTHENTICATE[] = "WWW_AUTHENTICATE"; +static const char __pyx_k_X_FORWARDED_HOST[] = "X_FORWARDED_HOST"; +static const char __pyx_k_HttpRequestParser[] = "HttpRequestParser"; +static const char __pyx_k_IF_MODIFIED_SINCE[] = "IF_MODIFIED_SINCE"; +static const char __pyx_k_RawRequestMessage[] = "_http_method[i] + */ + +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(int __pyx_v_i) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("http_method_str", 0); + + /* "aiohttp/_http_parser.pyx":76 + * + * cdef inline str http_method_str(int i): + * if i < METHODS_COUNT: # <<<<<<<<<<<<<< + * return _http_method[i] + * else: + */ + __pyx_t_1 = ((__pyx_v_i < 34) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":77 + * cdef inline str http_method_str(int i): + * if i < METHODS_COUNT: + * return _http_method[i] # <<<<<<<<<<<<<< + * else: + * return "" + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 77, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject*)__pyx_t_2)); + __pyx_r = ((PyObject*)__pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":76 + * + * cdef inline str http_method_str(int i): + * if i < METHODS_COUNT: # <<<<<<<<<<<<<< + * return _http_method[i] + * else: + */ + } + + /* "aiohttp/_http_parser.pyx":79 + * return _http_method[i] + * else: + * return "" # <<<<<<<<<<<<<< + * + * cdef inline object find_header(bytes raw_header): + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_kp_u_unknown); + __pyx_r = __pyx_kp_u_unknown; + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":75 + * + * + * cdef inline str http_method_str(int i): # <<<<<<<<<<<<<< + * if i < METHODS_COUNT: + * return _http_method[i] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._http_parser.http_method_str", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return 
__pyx_r; +} + +/* "aiohttp/_http_parser.pyx":81 + * return "" + * + * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObject *__pyx_v_raw_header) { + Py_ssize_t __pyx_v_size; + char *__pyx_v_buf; + int __pyx_v_idx; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("find_header", 0); + + /* "aiohttp/_http_parser.pyx":85 + * cdef char *buf + * cdef int idx + * PyBytes_AsStringAndSize(raw_header, &buf, &size) # <<<<<<<<<<<<<< + * idx = _find_header.find_header(buf, size) + * if idx == -1: + */ + __pyx_t_1 = PyBytes_AsStringAndSize(__pyx_v_raw_header, (&__pyx_v_buf), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 85, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":86 + * cdef int idx + * PyBytes_AsStringAndSize(raw_header, &buf, &size) + * idx = _find_header.find_header(buf, size) # <<<<<<<<<<<<<< + * if idx == -1: + * return raw_header.decode('utf-8', 'surrogateescape') + */ + __pyx_v_idx = find_header(__pyx_v_buf, __pyx_v_size); + + /* "aiohttp/_http_parser.pyx":87 + * PyBytes_AsStringAndSize(raw_header, &buf, &size) + * idx = _find_header.find_header(buf, size) + * if idx == -1: # <<<<<<<<<<<<<< + * return raw_header.decode('utf-8', 'surrogateescape') + * return headers[idx] + */ + __pyx_t_2 = ((__pyx_v_idx == -1L) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":88 + * idx = _find_header.find_header(buf, size) + * if idx == -1: + * return raw_header.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * return headers[idx] + * + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_raw_header == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); + __PYX_ERR(0, 88, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_raw_header, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 88, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":87 + * PyBytes_AsStringAndSize(raw_header, &buf, &size) + * idx = _find_header.find_header(buf, size) + * if idx == -1: # <<<<<<<<<<<<<< + * return raw_header.decode('utf-8', 'surrogateescape') + * return headers[idx] + */ + } + + /* "aiohttp/_http_parser.pyx":89 + * if idx == -1: + * return raw_header.decode('utf-8', 'surrogateescape') + * return headers[idx] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_7aiohttp_12_http_parser_headers == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 89, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_7aiohttp_12_http_parser_headers, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":81 + * return "" + * + * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.find_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":105 + * cdef readonly object url # yarl.URL + * + * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< + * should_close, compression, upgrade, chunked, url): + * self.method = method + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_method = 0; + PyObject *__pyx_v_path = 0; + PyObject *__pyx_v_version = 0; + PyObject *__pyx_v_headers = 0; + PyObject *__pyx_v_raw_headers = 0; + PyObject *__pyx_v_should_close = 0; + PyObject *__pyx_v_compression = 0; + PyObject *__pyx_v_upgrade = 0; + PyObject *__pyx_v_chunked = 0; + PyObject *__pyx_v_url = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_method,&__pyx_n_s_path,&__pyx_n_s_version,&__pyx_n_s_headers,&__pyx_n_s_raw_headers,&__pyx_n_s_should_close,&__pyx_n_s_compression,&__pyx_n_s_upgrade,&__pyx_n_s_chunked,&__pyx_n_s_url,0}; + PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_method)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_path)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 1); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_version)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 2); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 3); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw_headers)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 4); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_should_close)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 5); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compression)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 6); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_upgrade)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 7); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_chunked)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 8); __PYX_ERR(0, 105, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_url)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 9); __PYX_ERR(0, 105, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 105, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 10) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + } + __pyx_v_method = values[0]; + __pyx_v_path = values[1]; + __pyx_v_version = values[2]; + __pyx_v_headers = values[3]; + __pyx_v_raw_headers = values[4]; + __pyx_v_should_close = values[5]; + __pyx_v_compression = values[6]; + __pyx_v_upgrade = values[7]; + __pyx_v_chunked = values[8]; + __pyx_v_url = values[9]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 105, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), __pyx_v_method, __pyx_v_path, __pyx_v_version, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_compression, __pyx_v_upgrade, __pyx_v_chunked, __pyx_v_url); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v_method, PyObject *__pyx_v_path, PyObject *__pyx_v_version, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, PyObject *__pyx_v_should_close, PyObject *__pyx_v_compression, PyObject *__pyx_v_upgrade, PyObject *__pyx_v_chunked, PyObject *__pyx_v_url) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__init__", 
0); + + /* "aiohttp/_http_parser.pyx":107 + * def __init__(self, method, path, version, headers, raw_headers, + * should_close, compression, upgrade, chunked, url): + * self.method = method # <<<<<<<<<<<<<< + * self.path = path + * self.version = version + */ + if (!(likely(PyUnicode_CheckExact(__pyx_v_method))||((__pyx_v_method) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_method)->tp_name), 0))) __PYX_ERR(0, 107, __pyx_L1_error) + __pyx_t_1 = __pyx_v_method; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->method); + __Pyx_DECREF(__pyx_v_self->method); + __pyx_v_self->method = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":108 + * should_close, compression, upgrade, chunked, url): + * self.method = method + * self.path = path # <<<<<<<<<<<<<< + * self.version = version + * self.headers = headers + */ + if (!(likely(PyUnicode_CheckExact(__pyx_v_path))||((__pyx_v_path) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_path)->tp_name), 0))) __PYX_ERR(0, 108, __pyx_L1_error) + __pyx_t_1 = __pyx_v_path; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->path); + __Pyx_DECREF(__pyx_v_self->path); + __pyx_v_self->path = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":109 + * self.method = method + * self.path = path + * self.version = version # <<<<<<<<<<<<<< + * self.headers = headers + * self.raw_headers = raw_headers + */ + __Pyx_INCREF(__pyx_v_version); + __Pyx_GIVEREF(__pyx_v_version); + __Pyx_GOTREF(__pyx_v_self->version); + __Pyx_DECREF(__pyx_v_self->version); + __pyx_v_self->version = __pyx_v_version; + + /* "aiohttp/_http_parser.pyx":110 + * self.path = path + * self.version = version + * self.headers = headers # <<<<<<<<<<<<<< + * self.raw_headers = raw_headers + * self.should_close = should_close + */ + __Pyx_INCREF(__pyx_v_headers); + __Pyx_GIVEREF(__pyx_v_headers); + __Pyx_GOTREF(__pyx_v_self->headers); + __Pyx_DECREF(__pyx_v_self->headers); + __pyx_v_self->headers = __pyx_v_headers; + + /* "aiohttp/_http_parser.pyx":111 + * self.version = version + * self.headers = headers + * self.raw_headers = raw_headers # <<<<<<<<<<<<<< + * self.should_close = should_close + * self.compression = compression + */ + __Pyx_INCREF(__pyx_v_raw_headers); + __Pyx_GIVEREF(__pyx_v_raw_headers); + __Pyx_GOTREF(__pyx_v_self->raw_headers); + __Pyx_DECREF(__pyx_v_self->raw_headers); + __pyx_v_self->raw_headers = __pyx_v_raw_headers; + + /* "aiohttp/_http_parser.pyx":112 + * self.headers = headers + * self.raw_headers = raw_headers + * self.should_close = should_close # <<<<<<<<<<<<<< + * self.compression = compression + * self.upgrade = upgrade + */ + __Pyx_INCREF(__pyx_v_should_close); + __Pyx_GIVEREF(__pyx_v_should_close); + __Pyx_GOTREF(__pyx_v_self->should_close); + __Pyx_DECREF(__pyx_v_self->should_close); + __pyx_v_self->should_close = __pyx_v_should_close; + + /* "aiohttp/_http_parser.pyx":113 + * self.raw_headers = raw_headers + * self.should_close = should_close + * self.compression = compression # <<<<<<<<<<<<<< + * self.upgrade = upgrade + * self.chunked = chunked + */ + __Pyx_INCREF(__pyx_v_compression); + __Pyx_GIVEREF(__pyx_v_compression); + __Pyx_GOTREF(__pyx_v_self->compression); + __Pyx_DECREF(__pyx_v_self->compression); + __pyx_v_self->compression = __pyx_v_compression; + + /* "aiohttp/_http_parser.pyx":114 + * self.should_close = should_close + * 
self.compression = compression + * self.upgrade = upgrade # <<<<<<<<<<<<<< + * self.chunked = chunked + * self.url = url + */ + __Pyx_INCREF(__pyx_v_upgrade); + __Pyx_GIVEREF(__pyx_v_upgrade); + __Pyx_GOTREF(__pyx_v_self->upgrade); + __Pyx_DECREF(__pyx_v_self->upgrade); + __pyx_v_self->upgrade = __pyx_v_upgrade; + + /* "aiohttp/_http_parser.pyx":115 + * self.compression = compression + * self.upgrade = upgrade + * self.chunked = chunked # <<<<<<<<<<<<<< + * self.url = url + * + */ + __Pyx_INCREF(__pyx_v_chunked); + __Pyx_GIVEREF(__pyx_v_chunked); + __Pyx_GOTREF(__pyx_v_self->chunked); + __Pyx_DECREF(__pyx_v_self->chunked); + __pyx_v_self->chunked = __pyx_v_chunked; + + /* "aiohttp/_http_parser.pyx":116 + * self.upgrade = upgrade + * self.chunked = chunked + * self.url = url # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_INCREF(__pyx_v_url); + __Pyx_GIVEREF(__pyx_v_url); + __Pyx_GOTREF(__pyx_v_self->url); + __Pyx_DECREF(__pyx_v_self->url); + __pyx_v_self->url = __pyx_v_url; + + /* "aiohttp/_http_parser.pyx":105 + * cdef readonly object url # yarl.URL + * + * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< + * should_close, compression, upgrade, chunked, url): + * self.method = method + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":118 + * self.url = url + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("method", self.method)) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ + +/* "aiohttp/_http_parser.pyx":130 + * info.append(("chunked", self.chunked)) + * info.append(("url", self.url)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(PyObject *__pyx_self) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_cur_scope; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("genexpr", 0); + __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, __pyx_empty_tuple, NULL); + if (unlikely(!__pyx_cur_scope)) { + __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)Py_None); + __Pyx_INCREF(Py_None); + __PYX_ERR(0, 130, __pyx_L1_error) + } else { + __Pyx_GOTREF(__pyx_cur_scope); 
+ } + __pyx_cur_scope->__pyx_outer_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *) __pyx_self; + __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_outer_scope)); + __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope); + { + __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_DECREF(__pyx_cur_scope); + __Pyx_RefNannyFinishContext(); + return (PyObject *) gen; + } + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__repr__.genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ +{ + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)__pyx_generator->closure); + PyObject *__pyx_r = NULL; + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("genexpr", 0); + switch (__pyx_generator->resume_label) { + case 0: goto __pyx_L3_first_run; + default: /* CPython raises the right error here */ + __Pyx_RefNannyFinishContext(); + return NULL; + } + __pyx_L3_first_run:; + if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 130, __pyx_L1_error) + __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_r); + if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info)) { __Pyx_RaiseClosureNameError("info"); __PYX_ERR(0, 130, __pyx_L1_error) } + if (unlikely(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 130, __pyx_L1_error) + } + __pyx_t_1 = __pyx_cur_scope->__pyx_outer_scope->__pyx_v_info; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 130, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { + PyObject* sequence = __pyx_t_3; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 130, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + } else 
{ + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L6_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 130, __pyx_L1_error) + __pyx_t_7 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L7_unpacking_done; + __pyx_L6_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 130, __pyx_L1_error) + __pyx_L7_unpacking_done:; + } + __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); + __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_name, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + __pyx_t_4 = 0; + __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_val); + __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_r); __pyx_r = 0; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + #if !CYTHON_USE_EXC_INFO_STACK + __Pyx_Coroutine_ResetAndClearException(__pyx_generator); + #endif + __pyx_generator->resume_label = -1; + __Pyx_Coroutine_clear((PyObject*)__pyx_generator); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":118 + * self.url = url + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("method", self.method)) + */ + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_cur_scope; + PyObject *__pyx_v_sinfo = NULL; + PyObject *__pyx_r 
= NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__repr__", 0); + __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct____repr__, __pyx_empty_tuple, NULL); + if (unlikely(!__pyx_cur_scope)) { + __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)Py_None); + __Pyx_INCREF(Py_None); + __PYX_ERR(0, 118, __pyx_L1_error) + } else { + __Pyx_GOTREF(__pyx_cur_scope); + } + + /* "aiohttp/_http_parser.pyx":119 + * + * def __repr__(self): + * info = [] # <<<<<<<<<<<<<< + * info.append(("method", self.method)) + * info.append(("path", self.path)) + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_cur_scope->__pyx_v_info = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":120 + * def __repr__(self): + * info = [] + * info.append(("method", self.method)) # <<<<<<<<<<<<<< + * info.append(("path", self.path)) + * info.append(("version", self.version)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_method); + __Pyx_GIVEREF(__pyx_n_u_method); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_method); + __Pyx_INCREF(__pyx_v_self->method); + __Pyx_GIVEREF(__pyx_v_self->method); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->method); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":121 + * info = [] + * info.append(("method", self.method)) + * info.append(("path", self.path)) # <<<<<<<<<<<<<< + * info.append(("version", self.version)) + * info.append(("headers", self.headers)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_path); + __Pyx_GIVEREF(__pyx_n_u_path); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_path); + __Pyx_INCREF(__pyx_v_self->path); + __Pyx_GIVEREF(__pyx_v_self->path); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->path); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":122 + * info.append(("method", self.method)) + * info.append(("path", self.path)) + * info.append(("version", self.version)) # <<<<<<<<<<<<<< + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 122, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_version); + __Pyx_GIVEREF(__pyx_n_u_version); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version); + __Pyx_INCREF(__pyx_v_self->version); + __Pyx_GIVEREF(__pyx_v_self->version); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 122, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":123 + * info.append(("path", self.path)) + * 
info.append(("version", self.version)) + * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_headers); + __Pyx_GIVEREF(__pyx_n_u_headers); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_headers); + __Pyx_INCREF(__pyx_v_self->headers); + __Pyx_GIVEREF(__pyx_v_self->headers); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->headers); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":124 + * info.append(("version", self.version)) + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< + * info.append(("should_close", self.should_close)) + * info.append(("compression", self.compression)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 124, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_raw_headers); + __Pyx_GIVEREF(__pyx_n_u_raw_headers); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_raw_headers); + __Pyx_INCREF(__pyx_v_self->raw_headers); + __Pyx_GIVEREF(__pyx_v_self->raw_headers); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->raw_headers); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 124, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":125 + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< + * info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_should_close); + __Pyx_GIVEREF(__pyx_n_u_should_close); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_should_close); + __Pyx_INCREF(__pyx_v_self->should_close); + __Pyx_GIVEREF(__pyx_v_self->should_close); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->should_close); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":126 + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) + * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_compression); + __Pyx_GIVEREF(__pyx_n_u_compression); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_compression); + __Pyx_INCREF(__pyx_v_self->compression); + __Pyx_GIVEREF(__pyx_v_self->compression); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":127 + * info.append(("should_close", self.should_close)) + 
* info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< + * info.append(("chunked", self.chunked)) + * info.append(("url", self.url)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_upgrade); + __Pyx_GIVEREF(__pyx_n_u_upgrade); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_upgrade); + __Pyx_INCREF(__pyx_v_self->upgrade); + __Pyx_GIVEREF(__pyx_v_self->upgrade); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->upgrade); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":128 + * info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< + * info.append(("url", self.url)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_chunked); + __Pyx_GIVEREF(__pyx_n_u_chunked); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_chunked); + __Pyx_INCREF(__pyx_v_self->chunked); + __Pyx_GIVEREF(__pyx_v_self->chunked); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->chunked); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":129 + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + * info.append(("url", self.url)) # <<<<<<<<<<<<<< + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + * return '' + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_url); + __Pyx_GIVEREF(__pyx_n_u_url); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_url); + __Pyx_INCREF(__pyx_v_self->url); + __Pyx_GIVEREF(__pyx_v_self->url); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->url); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":130 + * info.append(("chunked", self.chunked)) + * info.append(("url", self.url)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ + __pyx_t_1 = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(((PyObject*)__pyx_cur_scope)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_Generator_Next(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_sinfo = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":131 + * info.append(("url", self.url)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + * return '' # <<<<<<<<<<<<<< + * + * def _replace(self, **dct): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = 
__Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawRequestMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyUnicode_Concat(__pyx_t_1, __pyx_kp_u__3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":118 + * self.url = url + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("method", self.method)) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_sinfo); + __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":133 + * return '' + * + * def _replace(self, **dct): # <<<<<<<<<<<<<< + * cdef RawRequestMessage ret + * ret = _new_request_message(self.method, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_dct = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_replace (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("_replace", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return NULL;} + if (__pyx_kwds && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_replace", 1))) return NULL; + __pyx_v_dct = (__pyx_kwds) ? 
PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_dct)) return NULL; + __Pyx_GOTREF(__pyx_v_dct); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), __pyx_v_dct); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_dct); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v_dct) { + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_ret = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_t_9; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + __Pyx_RefNannySetupContext("_replace", 0); + + /* "aiohttp/_http_parser.pyx":135 + * def _replace(self, **dct): + * cdef RawRequestMessage ret + * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< + * self.path, + * self.version, + */ + __pyx_t_1 = __pyx_v_self->method; + __Pyx_INCREF(__pyx_t_1); + + /* "aiohttp/_http_parser.pyx":136 + * cdef RawRequestMessage ret + * ret = _new_request_message(self.method, + * self.path, # <<<<<<<<<<<<<< + * self.version, + * self.headers, + */ + __pyx_t_2 = __pyx_v_self->path; + __Pyx_INCREF(__pyx_t_2); + + /* "aiohttp/_http_parser.pyx":137 + * ret = _new_request_message(self.method, + * self.path, + * self.version, # <<<<<<<<<<<<<< + * self.headers, + * self.raw_headers, + */ + __pyx_t_3 = __pyx_v_self->version; + __Pyx_INCREF(__pyx_t_3); + + /* "aiohttp/_http_parser.pyx":138 + * self.path, + * self.version, + * self.headers, # <<<<<<<<<<<<<< + * self.raw_headers, + * self.should_close, + */ + __pyx_t_4 = __pyx_v_self->headers; + __Pyx_INCREF(__pyx_t_4); + + /* "aiohttp/_http_parser.pyx":139 + * self.version, + * self.headers, + * self.raw_headers, # <<<<<<<<<<<<<< + * self.should_close, + * self.compression, + */ + __pyx_t_5 = __pyx_v_self->raw_headers; + __Pyx_INCREF(__pyx_t_5); + + /* "aiohttp/_http_parser.pyx":140 + * self.headers, + * self.raw_headers, + * self.should_close, # <<<<<<<<<<<<<< + * self.compression, + * self.upgrade, + */ + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_self->should_close); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 140, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":141 + * self.raw_headers, + * self.should_close, + * self.compression, # <<<<<<<<<<<<<< + * self.upgrade, + * self.chunked, + */ + __pyx_t_7 = __pyx_v_self->compression; + __Pyx_INCREF(__pyx_t_7); + + /* "aiohttp/_http_parser.pyx":142 + * self.should_close, + * self.compression, + * self.upgrade, # <<<<<<<<<<<<<< + * self.chunked, + * self.url) + */ + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_self->upgrade); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 142, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":143 + * self.compression, + * self.upgrade, + * self.chunked, # <<<<<<<<<<<<<< + * self.url) + * if "method" in dct: + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_self->chunked); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 143, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":144 + * self.upgrade, + * self.chunked, + * self.url) # <<<<<<<<<<<<<< + * if "method" in dct: + * 
ret.method = dct["method"] + */ + __pyx_t_10 = __pyx_v_self->url; + __Pyx_INCREF(__pyx_t_10); + + /* "aiohttp/_http_parser.pyx":135 + * def _replace(self, **dct): + * cdef RawRequestMessage ret + * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< + * self.path, + * self.version, + */ + __pyx_t_11 = __pyx_f_7aiohttp_12_http_parser__new_request_message(((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2), __pyx_t_3, __pyx_t_4, __pyx_t_5, __pyx_t_6, __pyx_t_7, __pyx_t_8, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage))))) __PYX_ERR(0, 135, __pyx_L1_error) + __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_11); + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":145 + * self.chunked, + * self.url) + * if "method" in dct: # <<<<<<<<<<<<<< + * ret.method = dct["method"] + * if "path" in dct: + */ + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_method, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 145, __pyx_L1_error) + __pyx_t_8 = (__pyx_t_9 != 0); + if (__pyx_t_8) { + + /* "aiohttp/_http_parser.pyx":146 + * self.url) + * if "method" in dct: + * ret.method = dct["method"] # <<<<<<<<<<<<<< + * if "path" in dct: + * ret.path = dct["path"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_method); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_11)->tp_name), 0))) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->method); + __Pyx_DECREF(__pyx_v_ret->method); + __pyx_v_ret->method = ((PyObject*)__pyx_t_11); + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":145 + * self.chunked, + * self.url) + * if "method" in dct: # <<<<<<<<<<<<<< + * ret.method = dct["method"] + * if "path" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":147 + * if "method" in dct: + * ret.method = dct["method"] + * if "path" in dct: # <<<<<<<<<<<<<< + * ret.path = dct["path"] + * if "version" in dct: + */ + __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_path, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 147, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_8 != 0); + if (__pyx_t_9) { + + /* "aiohttp/_http_parser.pyx":148 + * ret.method = dct["method"] + * if "path" in dct: + * ret.path = dct["path"] # <<<<<<<<<<<<<< + * if "version" in dct: + * ret.version = dct["version"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_path); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_11)->tp_name), 0))) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->path); + __Pyx_DECREF(__pyx_v_ret->path); + __pyx_v_ret->path = ((PyObject*)__pyx_t_11); + __pyx_t_11 = 0; + + /* 
"aiohttp/_http_parser.pyx":147 + * if "method" in dct: + * ret.method = dct["method"] + * if "path" in dct: # <<<<<<<<<<<<<< + * ret.path = dct["path"] + * if "version" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":149 + * if "path" in dct: + * ret.path = dct["path"] + * if "version" in dct: # <<<<<<<<<<<<<< + * ret.version = dct["version"] + * if "headers" in dct: + */ + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_version, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 149, __pyx_L1_error) + __pyx_t_8 = (__pyx_t_9 != 0); + if (__pyx_t_8) { + + /* "aiohttp/_http_parser.pyx":150 + * ret.path = dct["path"] + * if "version" in dct: + * ret.version = dct["version"] # <<<<<<<<<<<<<< + * if "headers" in dct: + * ret.headers = dct["headers"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_version); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 150, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->version); + __Pyx_DECREF(__pyx_v_ret->version); + __pyx_v_ret->version = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":149 + * if "path" in dct: + * ret.path = dct["path"] + * if "version" in dct: # <<<<<<<<<<<<<< + * ret.version = dct["version"] + * if "headers" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":151 + * if "version" in dct: + * ret.version = dct["version"] + * if "headers" in dct: # <<<<<<<<<<<<<< + * ret.headers = dct["headers"] + * if "raw_headers" in dct: + */ + __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_headers, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 151, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_8 != 0); + if (__pyx_t_9) { + + /* "aiohttp/_http_parser.pyx":152 + * ret.version = dct["version"] + * if "headers" in dct: + * ret.headers = dct["headers"] # <<<<<<<<<<<<<< + * if "raw_headers" in dct: + * ret.raw_headers = dct["raw_headers"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->headers); + __Pyx_DECREF(__pyx_v_ret->headers); + __pyx_v_ret->headers = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":151 + * if "version" in dct: + * ret.version = dct["version"] + * if "headers" in dct: # <<<<<<<<<<<<<< + * ret.headers = dct["headers"] + * if "raw_headers" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":153 + * if "headers" in dct: + * ret.headers = dct["headers"] + * if "raw_headers" in dct: # <<<<<<<<<<<<<< + * ret.raw_headers = dct["raw_headers"] + * if "should_close" in dct: + */ + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_raw_headers, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 153, __pyx_L1_error) + __pyx_t_8 = (__pyx_t_9 != 0); + if (__pyx_t_8) { + + /* "aiohttp/_http_parser.pyx":154 + * ret.headers = dct["headers"] + * if "raw_headers" in dct: + * ret.raw_headers = dct["raw_headers"] # <<<<<<<<<<<<<< + * if "should_close" in dct: + * ret.should_close = dct["should_close"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_raw_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 154, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->raw_headers); + __Pyx_DECREF(__pyx_v_ret->raw_headers); + __pyx_v_ret->raw_headers = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":153 + * if "headers" in dct: + * ret.headers = dct["headers"] + * if "raw_headers" in dct: # 
<<<<<<<<<<<<<< + * ret.raw_headers = dct["raw_headers"] + * if "should_close" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":155 + * if "raw_headers" in dct: + * ret.raw_headers = dct["raw_headers"] + * if "should_close" in dct: # <<<<<<<<<<<<<< + * ret.should_close = dct["should_close"] + * if "compression" in dct: + */ + __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_should_close, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 155, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_8 != 0); + if (__pyx_t_9) { + + /* "aiohttp/_http_parser.pyx":156 + * ret.raw_headers = dct["raw_headers"] + * if "should_close" in dct: + * ret.should_close = dct["should_close"] # <<<<<<<<<<<<<< + * if "compression" in dct: + * ret.compression = dct["compression"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_should_close); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->should_close); + __Pyx_DECREF(__pyx_v_ret->should_close); + __pyx_v_ret->should_close = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":155 + * if "raw_headers" in dct: + * ret.raw_headers = dct["raw_headers"] + * if "should_close" in dct: # <<<<<<<<<<<<<< + * ret.should_close = dct["should_close"] + * if "compression" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":157 + * if "should_close" in dct: + * ret.should_close = dct["should_close"] + * if "compression" in dct: # <<<<<<<<<<<<<< + * ret.compression = dct["compression"] + * if "upgrade" in dct: + */ + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_compression, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 157, __pyx_L1_error) + __pyx_t_8 = (__pyx_t_9 != 0); + if (__pyx_t_8) { + + /* "aiohttp/_http_parser.pyx":158 + * ret.should_close = dct["should_close"] + * if "compression" in dct: + * ret.compression = dct["compression"] # <<<<<<<<<<<<<< + * if "upgrade" in dct: + * ret.upgrade = dct["upgrade"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_compression); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 158, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->compression); + __Pyx_DECREF(__pyx_v_ret->compression); + __pyx_v_ret->compression = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":157 + * if "should_close" in dct: + * ret.should_close = dct["should_close"] + * if "compression" in dct: # <<<<<<<<<<<<<< + * ret.compression = dct["compression"] + * if "upgrade" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":159 + * if "compression" in dct: + * ret.compression = dct["compression"] + * if "upgrade" in dct: # <<<<<<<<<<<<<< + * ret.upgrade = dct["upgrade"] + * if "chunked" in dct: + */ + __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_upgrade, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 159, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_8 != 0); + if (__pyx_t_9) { + + /* "aiohttp/_http_parser.pyx":160 + * ret.compression = dct["compression"] + * if "upgrade" in dct: + * ret.upgrade = dct["upgrade"] # <<<<<<<<<<<<<< + * if "chunked" in dct: + * ret.chunked = dct["chunked"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_upgrade); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->upgrade); + __Pyx_DECREF(__pyx_v_ret->upgrade); + __pyx_v_ret->upgrade = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":159 + * if 
"compression" in dct: + * ret.compression = dct["compression"] + * if "upgrade" in dct: # <<<<<<<<<<<<<< + * ret.upgrade = dct["upgrade"] + * if "chunked" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":161 + * if "upgrade" in dct: + * ret.upgrade = dct["upgrade"] + * if "chunked" in dct: # <<<<<<<<<<<<<< + * ret.chunked = dct["chunked"] + * if "url" in dct: + */ + __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_chunked, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 161, __pyx_L1_error) + __pyx_t_8 = (__pyx_t_9 != 0); + if (__pyx_t_8) { + + /* "aiohttp/_http_parser.pyx":162 + * ret.upgrade = dct["upgrade"] + * if "chunked" in dct: + * ret.chunked = dct["chunked"] # <<<<<<<<<<<<<< + * if "url" in dct: + * ret.url = dct["url"] + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_chunked); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->chunked); + __Pyx_DECREF(__pyx_v_ret->chunked); + __pyx_v_ret->chunked = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":161 + * if "upgrade" in dct: + * ret.upgrade = dct["upgrade"] + * if "chunked" in dct: # <<<<<<<<<<<<<< + * ret.chunked = dct["chunked"] + * if "url" in dct: + */ + } + + /* "aiohttp/_http_parser.pyx":163 + * if "chunked" in dct: + * ret.chunked = dct["chunked"] + * if "url" in dct: # <<<<<<<<<<<<<< + * ret.url = dct["url"] + * return ret + */ + __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_url, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 163, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_8 != 0); + if (__pyx_t_9) { + + /* "aiohttp/_http_parser.pyx":164 + * ret.chunked = dct["chunked"] + * if "url" in dct: + * ret.url = dct["url"] # <<<<<<<<<<<<<< + * return ret + * + */ + __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_url); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_v_ret->url); + __Pyx_DECREF(__pyx_v_ret->url); + __pyx_v_ret->url = __pyx_t_11; + __pyx_t_11 = 0; + + /* "aiohttp/_http_parser.pyx":163 + * if "chunked" in dct: + * ret.chunked = dct["chunked"] + * if "url" in dct: # <<<<<<<<<<<<<< + * ret.url = dct["url"] + * return ret + */ + } + + /* "aiohttp/_http_parser.pyx":165 + * if "url" in dct: + * ret.url = dct["url"] + * return ret # <<<<<<<<<<<<<< + * + * cdef _new_request_message(str method, + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_ret)); + __pyx_r = ((PyObject *)__pyx_v_ret); + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":133 + * return '' + * + * def _replace(self, **dct): # <<<<<<<<<<<<<< + * cdef RawRequestMessage ret + * ret = _new_request_message(self.method, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage._replace", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":94 + * @cython.freelist(DEFAULT_FREELIST_SIZE) + * cdef class RawRequestMessage: + * cdef readonly str method # <<<<<<<<<<<<<< + * cdef readonly str path + * cdef readonly object version # HttpVersion + */ + 
+/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6method___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6method___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->method); + __pyx_r = __pyx_v_self->method; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":95 + * cdef class RawRequestMessage: + * cdef readonly str method + * cdef readonly str path # <<<<<<<<<<<<<< + * cdef readonly object version # HttpVersion + * cdef readonly object headers # CIMultiDict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4path___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4path___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->path); + __pyx_r = __pyx_v_self->path; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":96 + * cdef readonly str method + * cdef readonly str path + * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7version___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7version___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->version); + __pyx_r = __pyx_v_self->version; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":97 + * cdef readonly str path + * cdef readonly object version # HttpVersion + * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< + * cdef readonly object raw_headers # tuple + * cdef readonly object should_close + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->headers); + __pyx_r = __pyx_v_self->headers; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":98 + * cdef readonly object version # HttpVersion + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< + * cdef readonly object should_close + * cdef readonly object compression + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->raw_headers); + __pyx_r = __pyx_v_self->raw_headers; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":99 + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple + * cdef readonly object should_close # <<<<<<<<<<<<<< + * cdef readonly object compression + * cdef readonly object upgrade + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject 
*__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_12should_close___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_12should_close___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->should_close); + __pyx_r = __pyx_v_self->should_close; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":100 + * cdef readonly object raw_headers # tuple + * cdef readonly object should_close + * cdef readonly object compression # <<<<<<<<<<<<<< + * cdef readonly object upgrade + * cdef readonly object chunked + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11compression___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11compression___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->compression); + __pyx_r = __pyx_v_self->compression; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":101 + * cdef readonly object should_close + * cdef readonly object compression + * cdef readonly object upgrade # <<<<<<<<<<<<<< + * cdef readonly object chunked + * cdef readonly object url # yarl.URL + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + 
__Pyx_INCREF(__pyx_v_self->upgrade); + __pyx_r = __pyx_v_self->upgrade; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":102 + * cdef readonly object compression + * cdef readonly object upgrade + * cdef readonly object chunked # <<<<<<<<<<<<<< + * cdef readonly object url # yarl.URL + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7chunked___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7chunked___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->chunked); + __pyx_r = __pyx_v_self->chunked; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":103 + * cdef readonly object upgrade + * cdef readonly object chunked + * cdef readonly object url # yarl.URL # <<<<<<<<<<<<<< + * + * def __init__(self, method, path, version, headers, raw_headers, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_3url___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_3url___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->url); + __pyx_r = __pyx_v_self->url; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6__reduce_cython__(((struct 
__pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6__reduce_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(10); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->chunked); + __Pyx_GIVEREF(__pyx_v_self->chunked); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->chunked); + __Pyx_INCREF(__pyx_v_self->compression); + __Pyx_GIVEREF(__pyx_v_self->compression); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression); + __Pyx_INCREF(__pyx_v_self->headers); + __Pyx_GIVEREF(__pyx_v_self->headers); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_self->headers); + __Pyx_INCREF(__pyx_v_self->method); + __Pyx_GIVEREF(__pyx_v_self->method); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_v_self->method); + __Pyx_INCREF(__pyx_v_self->path); + __Pyx_GIVEREF(__pyx_v_self->path); + PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_self->path); + __Pyx_INCREF(__pyx_v_self->raw_headers); + __Pyx_GIVEREF(__pyx_v_self->raw_headers); + PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_v_self->raw_headers); + __Pyx_INCREF(__pyx_v_self->should_close); + __Pyx_GIVEREF(__pyx_v_self->should_close); + PyTuple_SET_ITEM(__pyx_t_1, 6, __pyx_v_self->should_close); + __Pyx_INCREF(__pyx_v_self->upgrade); + __Pyx_GIVEREF(__pyx_v_self->upgrade); + PyTuple_SET_ITEM(__pyx_t_1, 7, __pyx_v_self->upgrade); + __Pyx_INCREF(__pyx_v_self->url); + __Pyx_GIVEREF(__pyx_v_self->url); + PyTuple_SET_ITEM(__pyx_t_1, 8, __pyx_v_self->url); + __Pyx_INCREF(__pyx_v_self->version); + __Pyx_GIVEREF(__pyx_v_self->version); + PyTuple_SET_ITEM(__pyx_t_1, 9, __pyx_v_self->version); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":8 + * _dict = getattr(self, 
'__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state + */ + /*else*/ { + __pyx_t_2 = (__pyx_v_self->chunked != Py_None); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->compression != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->headers != Py_None); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->method != ((PyObject*)Py_None)); + __pyx_t_2 = (__pyx_t_5 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->path != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->raw_headers != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->should_close != Py_None); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->upgrade != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_3 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_self->url != Py_None); + __pyx_t_5 = (__pyx_t_2 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = 
(__pyx_v_self->version != Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":13 + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None + * if use_setstate: + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_RawRequestMessage); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_21004882); + __Pyx_GIVEREF(__pyx_int_21004882); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_21004882); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state + * else: + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_RawRequestMessage); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 
15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_21004882); + __Pyx_GIVEREF(__pyx_int_21004882); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_21004882); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_6 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__setstate_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return 
__pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":167 + * return ret + * + * cdef _new_request_message(str method, # <<<<<<<<<<<<<< + * str path, + * object version, + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject *__pyx_v_method, PyObject *__pyx_v_path, PyObject *__pyx_v_version, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, int __pyx_v_should_close, PyObject *__pyx_v_compression, int __pyx_v_upgrade, int __pyx_v_chunked, PyObject *__pyx_v_url) { + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_ret = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("_new_request_message", 0); + + /* "aiohttp/_http_parser.pyx":178 + * object url): + * cdef RawRequestMessage ret + * ret = RawRequestMessage.__new__(RawRequestMessage) # <<<<<<<<<<<<<< + * ret.method = method + * ret.path = path + */ + __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 178, __pyx_L1_error) + __Pyx_GOTREF(((PyObject *)__pyx_t_1)); + __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":179 + * cdef RawRequestMessage ret + * ret = RawRequestMessage.__new__(RawRequestMessage) + * ret.method = method # <<<<<<<<<<<<<< + * ret.path = path + * ret.version = version + */ + __Pyx_INCREF(__pyx_v_method); + __Pyx_GIVEREF(__pyx_v_method); + __Pyx_GOTREF(__pyx_v_ret->method); + __Pyx_DECREF(__pyx_v_ret->method); + __pyx_v_ret->method = __pyx_v_method; + + /* "aiohttp/_http_parser.pyx":180 + * ret = RawRequestMessage.__new__(RawRequestMessage) + * ret.method = method + * ret.path = path # <<<<<<<<<<<<<< + * ret.version = version + * ret.headers = headers + */ + __Pyx_INCREF(__pyx_v_path); + __Pyx_GIVEREF(__pyx_v_path); + __Pyx_GOTREF(__pyx_v_ret->path); + __Pyx_DECREF(__pyx_v_ret->path); + __pyx_v_ret->path = __pyx_v_path; + + /* "aiohttp/_http_parser.pyx":181 + * ret.method = method + * ret.path = path + * ret.version = version # <<<<<<<<<<<<<< + * ret.headers = headers + * ret.raw_headers = raw_headers + */ + __Pyx_INCREF(__pyx_v_version); + __Pyx_GIVEREF(__pyx_v_version); + __Pyx_GOTREF(__pyx_v_ret->version); + __Pyx_DECREF(__pyx_v_ret->version); + __pyx_v_ret->version = __pyx_v_version; + + /* "aiohttp/_http_parser.pyx":182 + * ret.path = path + * ret.version = version + * ret.headers = headers # <<<<<<<<<<<<<< + * ret.raw_headers = raw_headers + * ret.should_close = should_close + */ + __Pyx_INCREF(__pyx_v_headers); + __Pyx_GIVEREF(__pyx_v_headers); + __Pyx_GOTREF(__pyx_v_ret->headers); + __Pyx_DECREF(__pyx_v_ret->headers); + __pyx_v_ret->headers = __pyx_v_headers; + + /* "aiohttp/_http_parser.pyx":183 + * ret.version = version + * ret.headers = headers + * ret.raw_headers = raw_headers # 
<<<<<<<<<<<<<< + * ret.should_close = should_close + * ret.compression = compression + */ + __Pyx_INCREF(__pyx_v_raw_headers); + __Pyx_GIVEREF(__pyx_v_raw_headers); + __Pyx_GOTREF(__pyx_v_ret->raw_headers); + __Pyx_DECREF(__pyx_v_ret->raw_headers); + __pyx_v_ret->raw_headers = __pyx_v_raw_headers; + + /* "aiohttp/_http_parser.pyx":184 + * ret.headers = headers + * ret.raw_headers = raw_headers + * ret.should_close = should_close # <<<<<<<<<<<<<< + * ret.compression = compression + * ret.upgrade = upgrade + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 184, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->should_close); + __Pyx_DECREF(__pyx_v_ret->should_close); + __pyx_v_ret->should_close = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":185 + * ret.raw_headers = raw_headers + * ret.should_close = should_close + * ret.compression = compression # <<<<<<<<<<<<<< + * ret.upgrade = upgrade + * ret.chunked = chunked + */ + __Pyx_INCREF(__pyx_v_compression); + __Pyx_GIVEREF(__pyx_v_compression); + __Pyx_GOTREF(__pyx_v_ret->compression); + __Pyx_DECREF(__pyx_v_ret->compression); + __pyx_v_ret->compression = __pyx_v_compression; + + /* "aiohttp/_http_parser.pyx":186 + * ret.should_close = should_close + * ret.compression = compression + * ret.upgrade = upgrade # <<<<<<<<<<<<<< + * ret.chunked = chunked + * ret.url = url + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->upgrade); + __Pyx_DECREF(__pyx_v_ret->upgrade); + __pyx_v_ret->upgrade = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":187 + * ret.compression = compression + * ret.upgrade = upgrade + * ret.chunked = chunked # <<<<<<<<<<<<<< + * ret.url = url + * return ret + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->chunked); + __Pyx_DECREF(__pyx_v_ret->chunked); + __pyx_v_ret->chunked = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":188 + * ret.upgrade = upgrade + * ret.chunked = chunked + * ret.url = url # <<<<<<<<<<<<<< + * return ret + * + */ + __Pyx_INCREF(__pyx_v_url); + __Pyx_GIVEREF(__pyx_v_url); + __Pyx_GOTREF(__pyx_v_ret->url); + __Pyx_DECREF(__pyx_v_ret->url); + __pyx_v_ret->url = __pyx_v_url; + + /* "aiohttp/_http_parser.pyx":189 + * ret.chunked = chunked + * ret.url = url + * return ret # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_ret)); + __pyx_r = ((PyObject *)__pyx_v_ret); + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":167 + * return ret + * + * cdef _new_request_message(str method, # <<<<<<<<<<<<<< + * str path, + * object version, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser._new_request_message", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":204 + * cdef readonly object chunked + * + * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< + * should_close, compression, upgrade, chunked): + * self.version = version + */ + +/* Python wrapper */ 
+static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_version = 0; + PyObject *__pyx_v_code = 0; + PyObject *__pyx_v_reason = 0; + PyObject *__pyx_v_headers = 0; + PyObject *__pyx_v_raw_headers = 0; + PyObject *__pyx_v_should_close = 0; + PyObject *__pyx_v_compression = 0; + PyObject *__pyx_v_upgrade = 0; + PyObject *__pyx_v_chunked = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_version,&__pyx_n_s_code,&__pyx_n_s_reason,&__pyx_n_s_headers,&__pyx_n_s_raw_headers,&__pyx_n_s_should_close,&__pyx_n_s_compression,&__pyx_n_s_upgrade,&__pyx_n_s_chunked,0}; + PyObject* values[9] = {0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_version)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 1); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_reason)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 2); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 3); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw_headers)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 4); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_should_close)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 5); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compression)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 6); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_upgrade)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 7); __PYX_ERR(0, 204, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_chunked)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 8); __PYX_ERR(0, 204, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 204, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 9) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + } + __pyx_v_version = values[0]; + __pyx_v_code = values[1]; + __pyx_v_reason = values[2]; + __pyx_v_headers = values[3]; + __pyx_v_raw_headers = values[4]; + __pyx_v_should_close = values[5]; + __pyx_v_compression = values[6]; + __pyx_v_upgrade = values[7]; + __pyx_v_chunked = values[8]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 204, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self), __pyx_v_version, __pyx_v_code, __pyx_v_reason, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_compression, __pyx_v_upgrade, __pyx_v_chunked); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self, PyObject *__pyx_v_version, PyObject *__pyx_v_code, PyObject *__pyx_v_reason, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, PyObject *__pyx_v_should_close, PyObject *__pyx_v_compression, PyObject *__pyx_v_upgrade, PyObject *__pyx_v_chunked) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "aiohttp/_http_parser.pyx":206 + * def __init__(self, version, code, reason, headers, raw_headers, + * should_close, compression, upgrade, chunked): + * self.version = version # <<<<<<<<<<<<<< + * self.code = code + * self.reason = reason + */ + __Pyx_INCREF(__pyx_v_version); + __Pyx_GIVEREF(__pyx_v_version); + __Pyx_GOTREF(__pyx_v_self->version); + __Pyx_DECREF(__pyx_v_self->version); + __pyx_v_self->version = __pyx_v_version; + + /* "aiohttp/_http_parser.pyx":207 + * should_close, compression, upgrade, chunked): + * self.version = version + * self.code = code # <<<<<<<<<<<<<< + * self.reason = reason + * self.headers = headers + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_code); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 207, __pyx_L1_error) + 
__pyx_v_self->code = __pyx_t_1; + + /* "aiohttp/_http_parser.pyx":208 + * self.version = version + * self.code = code + * self.reason = reason # <<<<<<<<<<<<<< + * self.headers = headers + * self.raw_headers = raw_headers + */ + if (!(likely(PyUnicode_CheckExact(__pyx_v_reason))||((__pyx_v_reason) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_reason)->tp_name), 0))) __PYX_ERR(0, 208, __pyx_L1_error) + __pyx_t_2 = __pyx_v_reason; + __Pyx_INCREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->reason); + __Pyx_DECREF(__pyx_v_self->reason); + __pyx_v_self->reason = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":209 + * self.code = code + * self.reason = reason + * self.headers = headers # <<<<<<<<<<<<<< + * self.raw_headers = raw_headers + * self.should_close = should_close + */ + __Pyx_INCREF(__pyx_v_headers); + __Pyx_GIVEREF(__pyx_v_headers); + __Pyx_GOTREF(__pyx_v_self->headers); + __Pyx_DECREF(__pyx_v_self->headers); + __pyx_v_self->headers = __pyx_v_headers; + + /* "aiohttp/_http_parser.pyx":210 + * self.reason = reason + * self.headers = headers + * self.raw_headers = raw_headers # <<<<<<<<<<<<<< + * self.should_close = should_close + * self.compression = compression + */ + __Pyx_INCREF(__pyx_v_raw_headers); + __Pyx_GIVEREF(__pyx_v_raw_headers); + __Pyx_GOTREF(__pyx_v_self->raw_headers); + __Pyx_DECREF(__pyx_v_self->raw_headers); + __pyx_v_self->raw_headers = __pyx_v_raw_headers; + + /* "aiohttp/_http_parser.pyx":211 + * self.headers = headers + * self.raw_headers = raw_headers + * self.should_close = should_close # <<<<<<<<<<<<<< + * self.compression = compression + * self.upgrade = upgrade + */ + __Pyx_INCREF(__pyx_v_should_close); + __Pyx_GIVEREF(__pyx_v_should_close); + __Pyx_GOTREF(__pyx_v_self->should_close); + __Pyx_DECREF(__pyx_v_self->should_close); + __pyx_v_self->should_close = __pyx_v_should_close; + + /* "aiohttp/_http_parser.pyx":212 + * self.raw_headers = raw_headers + * self.should_close = should_close + * self.compression = compression # <<<<<<<<<<<<<< + * self.upgrade = upgrade + * self.chunked = chunked + */ + __Pyx_INCREF(__pyx_v_compression); + __Pyx_GIVEREF(__pyx_v_compression); + __Pyx_GOTREF(__pyx_v_self->compression); + __Pyx_DECREF(__pyx_v_self->compression); + __pyx_v_self->compression = __pyx_v_compression; + + /* "aiohttp/_http_parser.pyx":213 + * self.should_close = should_close + * self.compression = compression + * self.upgrade = upgrade # <<<<<<<<<<<<<< + * self.chunked = chunked + * + */ + __Pyx_INCREF(__pyx_v_upgrade); + __Pyx_GIVEREF(__pyx_v_upgrade); + __Pyx_GOTREF(__pyx_v_self->upgrade); + __Pyx_DECREF(__pyx_v_self->upgrade); + __pyx_v_self->upgrade = __pyx_v_upgrade; + + /* "aiohttp/_http_parser.pyx":214 + * self.compression = compression + * self.upgrade = upgrade + * self.chunked = chunked # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_INCREF(__pyx_v_chunked); + __Pyx_GIVEREF(__pyx_v_chunked); + __Pyx_GOTREF(__pyx_v_self->chunked); + __Pyx_DECREF(__pyx_v_self->chunked); + __pyx_v_self->chunked = __pyx_v_chunked; + + /* "aiohttp/_http_parser.pyx":204 + * cdef readonly object chunked + * + * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< + * should_close, compression, upgrade, chunked): + * self.version = version + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__init__", 
__pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":216 + * self.chunked = chunked + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("version", self.version)) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ + +/* "aiohttp/_http_parser.pyx":227 + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(PyObject *__pyx_self) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_cur_scope; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("genexpr", 0); + __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, __pyx_empty_tuple, NULL); + if (unlikely(!__pyx_cur_scope)) { + __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)Py_None); + __Pyx_INCREF(Py_None); + __PYX_ERR(0, 227, __pyx_L1_error) + } else { + __Pyx_GOTREF(__pyx_cur_scope); + } + __pyx_cur_scope->__pyx_outer_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *) __pyx_self; + __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_outer_scope)); + __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope); + { + __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_DECREF(__pyx_cur_scope); + __Pyx_RefNannyFinishContext(); + return (PyObject *) gen; + } + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__repr__.genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ +{ + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr 
*)__pyx_generator->closure); + PyObject *__pyx_r = NULL; + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("genexpr", 0); + switch (__pyx_generator->resume_label) { + case 0: goto __pyx_L3_first_run; + default: /* CPython raises the right error here */ + __Pyx_RefNannyFinishContext(); + return NULL; + } + __pyx_L3_first_run:; + if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 227, __pyx_L1_error) + __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_r); + if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info)) { __Pyx_RaiseClosureNameError("info"); __PYX_ERR(0, 227, __pyx_L1_error) } + if (unlikely(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 227, __pyx_L1_error) + } + __pyx_t_1 = __pyx_cur_scope->__pyx_outer_scope->__pyx_v_info; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 227, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { + PyObject* sequence = __pyx_t_3; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 227, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L6_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 227, __pyx_L1_error) + __pyx_t_7 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L7_unpacking_done; + __pyx_L6_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 227, __pyx_L1_error) + 
__pyx_L7_unpacking_done:; + } + __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); + __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_name, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + __pyx_t_4 = 0; + __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_val); + __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_r); __pyx_r = 0; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + #if !CYTHON_USE_EXC_INFO_STACK + __Pyx_Coroutine_ResetAndClearException(__pyx_generator); + #endif + __pyx_generator->resume_label = -1; + __Pyx_Coroutine_clear((PyObject*)__pyx_generator); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":216 + * self.chunked = chunked + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("version", self.version)) + */ + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *__pyx_cur_scope; + PyObject *__pyx_v_sinfo = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__repr__", 0); + __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, __pyx_empty_tuple, NULL); + if (unlikely(!__pyx_cur_scope)) { + __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)Py_None); + __Pyx_INCREF(Py_None); + __PYX_ERR(0, 216, __pyx_L1_error) + } else { + __Pyx_GOTREF(__pyx_cur_scope); + } + + /* "aiohttp/_http_parser.pyx":217 + * + * def __repr__(self): + * info = [] # <<<<<<<<<<<<<< + * info.append(("version", self.version)) + * info.append(("code", self.code)) + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 217, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_cur_scope->__pyx_v_info = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":218 + * def __repr__(self): + * info = [] + * info.append(("version", self.version)) # <<<<<<<<<<<<<< + * info.append(("code", self.code)) + * info.append(("reason", self.reason)) + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 218, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_u_version); + __Pyx_GIVEREF(__pyx_n_u_version); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version); + __Pyx_INCREF(__pyx_v_self->version); + __Pyx_GIVEREF(__pyx_v_self->version); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 218, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":219 + * info = [] + * info.append(("version", self.version)) + * info.append(("code", self.code)) # <<<<<<<<<<<<<< + * info.append(("reason", self.reason)) + * info.append(("headers", self.headers)) + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_code); + __Pyx_GIVEREF(__pyx_n_u_code); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_code); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":220 + * info.append(("version", self.version)) + * info.append(("code", self.code)) + * info.append(("reason", self.reason)) # <<<<<<<<<<<<<< + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_reason); + __Pyx_GIVEREF(__pyx_n_u_reason); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_reason); + __Pyx_INCREF(__pyx_v_self->reason); + __Pyx_GIVEREF(__pyx_v_self->reason); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->reason); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":221 + * info.append(("code", self.code)) + * info.append(("reason", self.reason)) + * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 221, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_headers); + __Pyx_GIVEREF(__pyx_n_u_headers); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_headers); + __Pyx_INCREF(__pyx_v_self->headers); + __Pyx_GIVEREF(__pyx_v_self->headers); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->headers); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 221, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":222 + * info.append(("reason", self.reason)) + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< + * info.append(("should_close", self.should_close)) + * info.append(("compression", self.compression)) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_raw_headers); + 
__Pyx_GIVEREF(__pyx_n_u_raw_headers); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_raw_headers); + __Pyx_INCREF(__pyx_v_self->raw_headers); + __Pyx_GIVEREF(__pyx_v_self->raw_headers); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->raw_headers); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":223 + * info.append(("headers", self.headers)) + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< + * info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_should_close); + __Pyx_GIVEREF(__pyx_n_u_should_close); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_should_close); + __Pyx_INCREF(__pyx_v_self->should_close); + __Pyx_GIVEREF(__pyx_v_self->should_close); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->should_close); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":224 + * info.append(("raw_headers", self.raw_headers)) + * info.append(("should_close", self.should_close)) + * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_compression); + __Pyx_GIVEREF(__pyx_n_u_compression); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_compression); + __Pyx_INCREF(__pyx_v_self->compression); + __Pyx_GIVEREF(__pyx_v_self->compression); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->compression); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":225 + * info.append(("should_close", self.should_close)) + * info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< + * info.append(("chunked", self.chunked)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 225, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_upgrade); + __Pyx_GIVEREF(__pyx_n_u_upgrade); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_upgrade); + __Pyx_INCREF(__pyx_v_self->upgrade); + __Pyx_GIVEREF(__pyx_v_self->upgrade); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->upgrade); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 225, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":226 + * info.append(("compression", self.compression)) + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + * return '' + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_u_chunked); + 
__Pyx_GIVEREF(__pyx_n_u_chunked); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_chunked); + __Pyx_INCREF(__pyx_v_self->chunked); + __Pyx_GIVEREF(__pyx_v_self->chunked); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->chunked); + __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":227 + * info.append(("upgrade", self.upgrade)) + * info.append(("chunked", self.chunked)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< + * return '' + * + */ + __pyx_t_3 = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(((PyObject*)__pyx_cur_scope)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_Generator_Next(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_sinfo = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":228 + * info.append(("chunked", self.chunked)) + * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + * return '' # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawResponseMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyUnicode_Concat(__pyx_t_3, __pyx_kp_u__3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":216 + * self.chunked = chunked + * + * def __repr__(self): # <<<<<<<<<<<<<< + * info = [] + * info.append(("version", self.version)) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_sinfo); + __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":194 + * @cython.freelist(DEFAULT_FREELIST_SIZE) + * cdef class RawResponseMessage: + * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< + * cdef readonly int code + * cdef readonly str reason + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7version___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7version___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = 
NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->version); + __pyx_r = __pyx_v_self->version; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":195 + * cdef class RawResponseMessage: + * cdef readonly object version # HttpVersion + * cdef readonly int code # <<<<<<<<<<<<<< + * cdef readonly str reason + * cdef readonly object headers # CIMultiDict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.code.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":196 + * cdef readonly object version # HttpVersion + * cdef readonly int code + * cdef readonly str reason # <<<<<<<<<<<<<< + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6reason___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6reason___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->reason); + __pyx_r = __pyx_v_self->reason; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":197 + * cdef readonly int code + * cdef readonly str reason + * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< + * cdef 
readonly object raw_headers # tuple + * cdef readonly object should_close + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->headers); + __pyx_r = __pyx_v_self->headers; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":198 + * cdef readonly str reason + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< + * cdef readonly object should_close + * cdef readonly object compression + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->raw_headers); + __pyx_r = __pyx_v_self->raw_headers; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":199 + * cdef readonly object headers # CIMultiDict + * cdef readonly object raw_headers # tuple + * cdef readonly object should_close # <<<<<<<<<<<<<< + * cdef readonly object compression + * cdef readonly object upgrade + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_12should_close___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_12should_close___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->should_close); + __pyx_r = __pyx_v_self->should_close; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":200 + * cdef readonly object raw_headers # tuple + * cdef readonly object should_close + * cdef readonly object compression # <<<<<<<<<<<<<< + * cdef readonly object upgrade + * cdef readonly object chunked + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11compression___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11compression___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->compression); + __pyx_r = __pyx_v_self->compression; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":201 + * cdef readonly object should_close + * cdef readonly object compression + * cdef readonly object upgrade # <<<<<<<<<<<<<< + * cdef readonly object chunked + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->upgrade); + __pyx_r = __pyx_v_self->upgrade; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":202 + * cdef readonly object compression + * cdef readonly object upgrade + * cdef readonly object chunked # <<<<<<<<<<<<<< + * + * def __init__(self, version, code, reason, headers, raw_headers, + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7chunked___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7chunked___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->chunked); + __pyx_r = __pyx_v_self->chunked; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4__reduce_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(9); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_self->chunked); + __Pyx_GIVEREF(__pyx_v_self->chunked); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->chunked); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->compression); + __Pyx_GIVEREF(__pyx_v_self->compression); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_self->compression); + __Pyx_INCREF(__pyx_v_self->headers); + __Pyx_GIVEREF(__pyx_v_self->headers); + PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_self->headers); + __Pyx_INCREF(__pyx_v_self->raw_headers); + 
__Pyx_GIVEREF(__pyx_v_self->raw_headers); + PyTuple_SET_ITEM(__pyx_t_2, 4, __pyx_v_self->raw_headers); + __Pyx_INCREF(__pyx_v_self->reason); + __Pyx_GIVEREF(__pyx_v_self->reason); + PyTuple_SET_ITEM(__pyx_t_2, 5, __pyx_v_self->reason); + __Pyx_INCREF(__pyx_v_self->should_close); + __Pyx_GIVEREF(__pyx_v_self->should_close); + PyTuple_SET_ITEM(__pyx_t_2, 6, __pyx_v_self->should_close); + __Pyx_INCREF(__pyx_v_self->upgrade); + __Pyx_GIVEREF(__pyx_v_self->upgrade); + PyTuple_SET_ITEM(__pyx_t_2, 7, __pyx_v_self->upgrade); + __Pyx_INCREF(__pyx_v_self->version); + __Pyx_GIVEREF(__pyx_v_self->version); + PyTuple_SET_ITEM(__pyx_t_2, 8, __pyx_v_self->version); + __pyx_t_1 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v__dict = __pyx_t_2; + __pyx_t_2 = 0; + + /* "(tree fragment)":7 + * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_3 = (__pyx_v__dict != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state + */ + /*else*/ { + __pyx_t_3 = 
(__pyx_v_self->chunked != Py_None); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->compression != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_self->headers != Py_None); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->raw_headers != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_self->reason != ((PyObject*)Py_None)); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->should_close != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = (__pyx_v_self->upgrade != Py_None); + __pyx_t_5 = (__pyx_t_3 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_self->version != Py_None); + __pyx_t_3 = (__pyx_t_5 != 0); + __pyx_t_4 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_4; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state + * else: + */ + __pyx_t_4 = (__pyx_v_use_setstate != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":13 + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None + * if use_setstate: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle_RawResponseMessag); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_209127132); + __Pyx_GIVEREF(__pyx_int_209127132); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_209127132); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_1 = 0; + 
__pyx_t_2 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state + * else: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_RawResponseMessag); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_209127132); + __Pyx_GIVEREF(__pyx_int_209127132); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_209127132); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); + __pyx_t_6 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6__setstate_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":17 + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":231 + * + * + * cdef _new_response_message(object version, # <<<<<<<<<<<<<< + * int code, + * str reason, + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject *__pyx_v_version, int __pyx_v_code, PyObject *__pyx_v_reason, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, int __pyx_v_should_close, PyObject *__pyx_v_compression, int __pyx_v_upgrade, int __pyx_v_chunked) { + struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_ret = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("_new_response_message", 0); + + /* "aiohttp/_http_parser.pyx":241 + * bint chunked): + * cdef RawResponseMessage ret + * ret = RawResponseMessage.__new__(RawResponseMessage) # <<<<<<<<<<<<<< + * ret.version = version + * ret.code = code + */ + __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 241, __pyx_L1_error) + __Pyx_GOTREF(((PyObject *)__pyx_t_1)); + __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":242 + * cdef RawResponseMessage ret + * ret = RawResponseMessage.__new__(RawResponseMessage) + * ret.version = version # <<<<<<<<<<<<<< + * ret.code = code + * ret.reason = reason + */ + __Pyx_INCREF(__pyx_v_version); + __Pyx_GIVEREF(__pyx_v_version); + __Pyx_GOTREF(__pyx_v_ret->version); + __Pyx_DECREF(__pyx_v_ret->version); + __pyx_v_ret->version = __pyx_v_version; + + /* "aiohttp/_http_parser.pyx":243 + * ret = RawResponseMessage.__new__(RawResponseMessage) + * ret.version = version + * ret.code = code # <<<<<<<<<<<<<< 
+ * ret.reason = reason + * ret.headers = headers + */ + __pyx_v_ret->code = __pyx_v_code; + + /* "aiohttp/_http_parser.pyx":244 + * ret.version = version + * ret.code = code + * ret.reason = reason # <<<<<<<<<<<<<< + * ret.headers = headers + * ret.raw_headers = raw_headers + */ + __Pyx_INCREF(__pyx_v_reason); + __Pyx_GIVEREF(__pyx_v_reason); + __Pyx_GOTREF(__pyx_v_ret->reason); + __Pyx_DECREF(__pyx_v_ret->reason); + __pyx_v_ret->reason = __pyx_v_reason; + + /* "aiohttp/_http_parser.pyx":245 + * ret.code = code + * ret.reason = reason + * ret.headers = headers # <<<<<<<<<<<<<< + * ret.raw_headers = raw_headers + * ret.should_close = should_close + */ + __Pyx_INCREF(__pyx_v_headers); + __Pyx_GIVEREF(__pyx_v_headers); + __Pyx_GOTREF(__pyx_v_ret->headers); + __Pyx_DECREF(__pyx_v_ret->headers); + __pyx_v_ret->headers = __pyx_v_headers; + + /* "aiohttp/_http_parser.pyx":246 + * ret.reason = reason + * ret.headers = headers + * ret.raw_headers = raw_headers # <<<<<<<<<<<<<< + * ret.should_close = should_close + * ret.compression = compression + */ + __Pyx_INCREF(__pyx_v_raw_headers); + __Pyx_GIVEREF(__pyx_v_raw_headers); + __Pyx_GOTREF(__pyx_v_ret->raw_headers); + __Pyx_DECREF(__pyx_v_ret->raw_headers); + __pyx_v_ret->raw_headers = __pyx_v_raw_headers; + + /* "aiohttp/_http_parser.pyx":247 + * ret.headers = headers + * ret.raw_headers = raw_headers + * ret.should_close = should_close # <<<<<<<<<<<<<< + * ret.compression = compression + * ret.upgrade = upgrade + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 247, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->should_close); + __Pyx_DECREF(__pyx_v_ret->should_close); + __pyx_v_ret->should_close = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":248 + * ret.raw_headers = raw_headers + * ret.should_close = should_close + * ret.compression = compression # <<<<<<<<<<<<<< + * ret.upgrade = upgrade + * ret.chunked = chunked + */ + __Pyx_INCREF(__pyx_v_compression); + __Pyx_GIVEREF(__pyx_v_compression); + __Pyx_GOTREF(__pyx_v_ret->compression); + __Pyx_DECREF(__pyx_v_ret->compression); + __pyx_v_ret->compression = __pyx_v_compression; + + /* "aiohttp/_http_parser.pyx":249 + * ret.should_close = should_close + * ret.compression = compression + * ret.upgrade = upgrade # <<<<<<<<<<<<<< + * ret.chunked = chunked + * return ret + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 249, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->upgrade); + __Pyx_DECREF(__pyx_v_ret->upgrade); + __pyx_v_ret->upgrade = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":250 + * ret.compression = compression + * ret.upgrade = upgrade + * ret.chunked = chunked # <<<<<<<<<<<<<< + * return ret + * + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 250, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_ret->chunked); + __Pyx_DECREF(__pyx_v_ret->chunked); + __pyx_v_ret->chunked = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":251 + * ret.upgrade = upgrade + * ret.chunked = chunked + * return ret # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_ret)); + __pyx_r = ((PyObject *)__pyx_v_ret); + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":231 + * + * + * cdef _new_response_message(object version, # <<<<<<<<<<<<<< 
+ * int code, + * str reason, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser._new_response_message", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":293 + * Py_buffer py_buf + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.http_parser)) + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "aiohttp/_http_parser.pyx":294 + * + * def __cinit__(self): + * self._cparser = \ # <<<<<<<<<<<<<< + * PyMem_Malloc(sizeof(cparser.http_parser)) + * if self._cparser is NULL: + */ + __pyx_v_self->_cparser = ((struct http_parser *)PyMem_Malloc((sizeof(struct http_parser)))); + + /* "aiohttp/_http_parser.pyx":296 + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.http_parser)) + * if self._cparser is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + __pyx_t_1 = ((__pyx_v_self->_cparser == NULL) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_parser.pyx":297 + * PyMem_Malloc(sizeof(cparser.http_parser)) + * if self._cparser is NULL: + * raise MemoryError() # <<<<<<<<<<<<<< + * + * self._csettings = \ + */ + PyErr_NoMemory(); __PYX_ERR(0, 297, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":296 + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.http_parser)) + * if self._cparser is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + } + + /* "aiohttp/_http_parser.pyx":299 + * raise MemoryError() + * + * self._csettings = \ # <<<<<<<<<<<<<< + * PyMem_Malloc(sizeof(cparser.http_parser_settings)) + * if self._csettings is NULL: + */ + __pyx_v_self->_csettings = ((struct http_parser_settings *)PyMem_Malloc((sizeof(struct http_parser_settings)))); + + /* "aiohttp/_http_parser.pyx":301 + * self._csettings = \ + * PyMem_Malloc(sizeof(cparser.http_parser_settings)) + * if self._csettings is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + __pyx_t_1 = ((__pyx_v_self->_csettings == NULL) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_parser.pyx":302 + * PyMem_Malloc(sizeof(cparser.http_parser_settings)) + * if self._csettings is NULL: + * raise MemoryError() # <<<<<<<<<<<<<< + * + * def __dealloc__(self): + */ + PyErr_NoMemory(); __PYX_ERR(0, 302, 
__pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":301 + * self._csettings = \ + * PyMem_Malloc(sizeof(cparser.http_parser_settings)) + * if self._csettings is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + } + + /* "aiohttp/_http_parser.pyx":293 + * Py_buffer py_buf + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.http_parser)) + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":304 + * raise MemoryError() + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) + */ + +/* Python wrapper */ +static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ +static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); + __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__dealloc__", 0); + + /* "aiohttp/_http_parser.pyx":305 + * + * def __dealloc__(self): + * PyMem_Free(self._cparser) # <<<<<<<<<<<<<< + * PyMem_Free(self._csettings) + * + */ + PyMem_Free(__pyx_v_self->_cparser); + + /* "aiohttp/_http_parser.pyx":306 + * def __dealloc__(self): + * PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) # <<<<<<<<<<<<<< + * + * cdef _init(self, cparser.http_parser_type mode, + */ + PyMem_Free(__pyx_v_self->_csettings); + + /* "aiohttp/_http_parser.pyx":304 + * raise MemoryError() + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "aiohttp/_http_parser.pyx":308 + * PyMem_Free(self._csettings) + * + * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< + * object protocol, object loop, object timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args) { + + /* "aiohttp/_http_parser.pyx":309 + * + * cdef _init(self, cparser.http_parser_type mode, + * object protocol, object loop, object timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + PyObject *__pyx_v_timer = ((PyObject *)Py_None); + size_t __pyx_v_max_line_size = ((size_t)0x1FFE); + size_t __pyx_v_max_headers = ((size_t)0x8000); + size_t __pyx_v_max_field_size = ((size_t)0x1FFE); + + /* "aiohttp/_http_parser.pyx":311 + * object protocol, object loop, object timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< + * bint 
response_with_body=True, bint auto_decompress=True): + * cparser.http_parser_init(self._cparser, mode) + */ + PyObject *__pyx_v_payload_exception = ((PyObject *)Py_None); + + /* "aiohttp/_http_parser.pyx":312 + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint auto_decompress=True): # <<<<<<<<<<<<<< + * cparser.http_parser_init(self._cparser, mode) + * self._cparser.data = self + */ + int __pyx_v_response_with_body = ((int)1); + int __pyx_v_auto_decompress = ((int)1); + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("_init", 0); + if (__pyx_optional_args) { + if (__pyx_optional_args->__pyx_n > 0) { + __pyx_v_timer = __pyx_optional_args->timer; + if (__pyx_optional_args->__pyx_n > 1) { + __pyx_v_max_line_size = __pyx_optional_args->max_line_size; + if (__pyx_optional_args->__pyx_n > 2) { + __pyx_v_max_headers = __pyx_optional_args->max_headers; + if (__pyx_optional_args->__pyx_n > 3) { + __pyx_v_max_field_size = __pyx_optional_args->max_field_size; + if (__pyx_optional_args->__pyx_n > 4) { + __pyx_v_payload_exception = __pyx_optional_args->payload_exception; + if (__pyx_optional_args->__pyx_n > 5) { + __pyx_v_response_with_body = __pyx_optional_args->response_with_body; + if (__pyx_optional_args->__pyx_n > 6) { + __pyx_v_auto_decompress = __pyx_optional_args->auto_decompress; + } + } + } + } + } + } + } + } + + /* "aiohttp/_http_parser.pyx":313 + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint auto_decompress=True): + * cparser.http_parser_init(self._cparser, mode) # <<<<<<<<<<<<<< + * self._cparser.data = self + * self._cparser.content_length = 0 + */ + http_parser_init(__pyx_v_self->_cparser, __pyx_v_mode); + + /* "aiohttp/_http_parser.pyx":314 + * bint response_with_body=True, bint auto_decompress=True): + * cparser.http_parser_init(self._cparser, mode) + * self._cparser.data = self # <<<<<<<<<<<<<< + * self._cparser.content_length = 0 + * + */ + __pyx_v_self->_cparser->data = ((void *)__pyx_v_self); + + /* "aiohttp/_http_parser.pyx":315 + * cparser.http_parser_init(self._cparser, mode) + * self._cparser.data = self + * self._cparser.content_length = 0 # <<<<<<<<<<<<<< + * + * cparser.http_parser_settings_init(self._csettings) + */ + __pyx_v_self->_cparser->content_length = 0; + + /* "aiohttp/_http_parser.pyx":317 + * self._cparser.content_length = 0 + * + * cparser.http_parser_settings_init(self._csettings) # <<<<<<<<<<<<<< + * + * self._protocol = protocol + */ + http_parser_settings_init(__pyx_v_self->_csettings); + + /* "aiohttp/_http_parser.pyx":319 + * cparser.http_parser_settings_init(self._csettings) + * + * self._protocol = protocol # <<<<<<<<<<<<<< + * self._loop = loop + * self._timer = timer + */ + __Pyx_INCREF(__pyx_v_protocol); + __Pyx_GIVEREF(__pyx_v_protocol); + __Pyx_GOTREF(__pyx_v_self->_protocol); + __Pyx_DECREF(__pyx_v_self->_protocol); + __pyx_v_self->_protocol = __pyx_v_protocol; + + /* "aiohttp/_http_parser.pyx":320 + * + * self._protocol = protocol + * self._loop = loop # <<<<<<<<<<<<<< + * self._timer = timer + * + */ + __Pyx_INCREF(__pyx_v_loop); + __Pyx_GIVEREF(__pyx_v_loop); + __Pyx_GOTREF(__pyx_v_self->_loop); + __Pyx_DECREF(__pyx_v_self->_loop); + __pyx_v_self->_loop = __pyx_v_loop; + + /* "aiohttp/_http_parser.pyx":321 + * self._protocol = protocol + * self._loop = loop + * self._timer = timer # <<<<<<<<<<<<<< + * + * self._buf = bytearray() + */ + 
__Pyx_INCREF(__pyx_v_timer); + __Pyx_GIVEREF(__pyx_v_timer); + __Pyx_GOTREF(__pyx_v_self->_timer); + __Pyx_DECREF(__pyx_v_self->_timer); + __pyx_v_self->_timer = __pyx_v_timer; + + /* "aiohttp/_http_parser.pyx":323 + * self._timer = timer + * + * self._buf = bytearray() # <<<<<<<<<<<<<< + * self._payload = None + * self._payload_error = 0 + */ + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 323, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_buf); + __Pyx_DECREF(__pyx_v_self->_buf); + __pyx_v_self->_buf = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":324 + * + * self._buf = bytearray() + * self._payload = None # <<<<<<<<<<<<<< + * self._payload_error = 0 + * self._payload_exception = payload_exception + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_payload); + __Pyx_DECREF(__pyx_v_self->_payload); + __pyx_v_self->_payload = Py_None; + + /* "aiohttp/_http_parser.pyx":325 + * self._buf = bytearray() + * self._payload = None + * self._payload_error = 0 # <<<<<<<<<<<<<< + * self._payload_exception = payload_exception + * self._messages = [] + */ + __pyx_v_self->_payload_error = 0; + + /* "aiohttp/_http_parser.pyx":326 + * self._payload = None + * self._payload_error = 0 + * self._payload_exception = payload_exception # <<<<<<<<<<<<<< + * self._messages = [] + * + */ + __Pyx_INCREF(__pyx_v_payload_exception); + __Pyx_GIVEREF(__pyx_v_payload_exception); + __Pyx_GOTREF(__pyx_v_self->_payload_exception); + __Pyx_DECREF(__pyx_v_self->_payload_exception); + __pyx_v_self->_payload_exception = __pyx_v_payload_exception; + + /* "aiohttp/_http_parser.pyx":327 + * self._payload_error = 0 + * self._payload_exception = payload_exception + * self._messages = [] # <<<<<<<<<<<<<< + * + * self._raw_name = bytearray() + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 327, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_messages); + __Pyx_DECREF(__pyx_v_self->_messages); + __pyx_v_self->_messages = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":329 + * self._messages = [] + * + * self._raw_name = bytearray() # <<<<<<<<<<<<<< + * self._raw_value = bytearray() + * self._has_value = False + */ + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_raw_name); + __Pyx_DECREF(__pyx_v_self->_raw_name); + __pyx_v_self->_raw_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":330 + * + * self._raw_name = bytearray() + * self._raw_value = bytearray() # <<<<<<<<<<<<<< + * self._has_value = False + * + */ + __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_raw_value); + __Pyx_DECREF(__pyx_v_self->_raw_value); + __pyx_v_self->_raw_value = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":331 + * self._raw_name = bytearray() + * self._raw_value = bytearray() + * self._has_value = False # <<<<<<<<<<<<<< + * + * self._max_line_size = max_line_size + */ + __pyx_v_self->_has_value = 0; + + /* "aiohttp/_http_parser.pyx":333 + * self._has_value = False + * + * 
self._max_line_size = max_line_size # <<<<<<<<<<<<<< + * self._max_headers = max_headers + * self._max_field_size = max_field_size + */ + __pyx_v_self->_max_line_size = __pyx_v_max_line_size; + + /* "aiohttp/_http_parser.pyx":334 + * + * self._max_line_size = max_line_size + * self._max_headers = max_headers # <<<<<<<<<<<<<< + * self._max_field_size = max_field_size + * self._response_with_body = response_with_body + */ + __pyx_v_self->_max_headers = __pyx_v_max_headers; + + /* "aiohttp/_http_parser.pyx":335 + * self._max_line_size = max_line_size + * self._max_headers = max_headers + * self._max_field_size = max_field_size # <<<<<<<<<<<<<< + * self._response_with_body = response_with_body + * self._upgraded = False + */ + __pyx_v_self->_max_field_size = __pyx_v_max_field_size; + + /* "aiohttp/_http_parser.pyx":336 + * self._max_headers = max_headers + * self._max_field_size = max_field_size + * self._response_with_body = response_with_body # <<<<<<<<<<<<<< + * self._upgraded = False + * self._auto_decompress = auto_decompress + */ + __pyx_v_self->_response_with_body = __pyx_v_response_with_body; + + /* "aiohttp/_http_parser.pyx":337 + * self._max_field_size = max_field_size + * self._response_with_body = response_with_body + * self._upgraded = False # <<<<<<<<<<<<<< + * self._auto_decompress = auto_decompress + * self._content_encoding = None + */ + __pyx_v_self->_upgraded = 0; + + /* "aiohttp/_http_parser.pyx":338 + * self._response_with_body = response_with_body + * self._upgraded = False + * self._auto_decompress = auto_decompress # <<<<<<<<<<<<<< + * self._content_encoding = None + * + */ + __pyx_v_self->_auto_decompress = __pyx_v_auto_decompress; + + /* "aiohttp/_http_parser.pyx":339 + * self._upgraded = False + * self._auto_decompress = auto_decompress + * self._content_encoding = None # <<<<<<<<<<<<<< + * + * self._csettings.on_url = cb_on_url + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_content_encoding); + __Pyx_DECREF(__pyx_v_self->_content_encoding); + __pyx_v_self->_content_encoding = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":341 + * self._content_encoding = None + * + * self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<< + * self._csettings.on_status = cb_on_status + * self._csettings.on_header_field = cb_on_header_field + */ + __pyx_v_self->_csettings->on_url = __pyx_f_7aiohttp_12_http_parser_cb_on_url; + + /* "aiohttp/_http_parser.pyx":342 + * + * self._csettings.on_url = cb_on_url + * self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<< + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value + */ + __pyx_v_self->_csettings->on_status = __pyx_f_7aiohttp_12_http_parser_cb_on_status; + + /* "aiohttp/_http_parser.pyx":343 + * self._csettings.on_url = cb_on_url + * self._csettings.on_status = cb_on_status + * self._csettings.on_header_field = cb_on_header_field # <<<<<<<<<<<<<< + * self._csettings.on_header_value = cb_on_header_value + * self._csettings.on_headers_complete = cb_on_headers_complete + */ + __pyx_v_self->_csettings->on_header_field = __pyx_f_7aiohttp_12_http_parser_cb_on_header_field; + + /* "aiohttp/_http_parser.pyx":344 + * self._csettings.on_status = cb_on_status + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value # <<<<<<<<<<<<<< + * self._csettings.on_headers_complete = cb_on_headers_complete + * self._csettings.on_body = cb_on_body + */ + __pyx_v_self->_csettings->on_header_value = 
__pyx_f_7aiohttp_12_http_parser_cb_on_header_value; + + /* "aiohttp/_http_parser.pyx":345 + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value + * self._csettings.on_headers_complete = cb_on_headers_complete # <<<<<<<<<<<<<< + * self._csettings.on_body = cb_on_body + * self._csettings.on_message_begin = cb_on_message_begin + */ + __pyx_v_self->_csettings->on_headers_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete; + + /* "aiohttp/_http_parser.pyx":346 + * self._csettings.on_header_value = cb_on_header_value + * self._csettings.on_headers_complete = cb_on_headers_complete + * self._csettings.on_body = cb_on_body # <<<<<<<<<<<<<< + * self._csettings.on_message_begin = cb_on_message_begin + * self._csettings.on_message_complete = cb_on_message_complete + */ + __pyx_v_self->_csettings->on_body = __pyx_f_7aiohttp_12_http_parser_cb_on_body; + + /* "aiohttp/_http_parser.pyx":347 + * self._csettings.on_headers_complete = cb_on_headers_complete + * self._csettings.on_body = cb_on_body + * self._csettings.on_message_begin = cb_on_message_begin # <<<<<<<<<<<<<< + * self._csettings.on_message_complete = cb_on_message_complete + * self._csettings.on_chunk_header = cb_on_chunk_header + */ + __pyx_v_self->_csettings->on_message_begin = __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin; + + /* "aiohttp/_http_parser.pyx":348 + * self._csettings.on_body = cb_on_body + * self._csettings.on_message_begin = cb_on_message_begin + * self._csettings.on_message_complete = cb_on_message_complete # <<<<<<<<<<<<<< + * self._csettings.on_chunk_header = cb_on_chunk_header + * self._csettings.on_chunk_complete = cb_on_chunk_complete + */ + __pyx_v_self->_csettings->on_message_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete; + + /* "aiohttp/_http_parser.pyx":349 + * self._csettings.on_message_begin = cb_on_message_begin + * self._csettings.on_message_complete = cb_on_message_complete + * self._csettings.on_chunk_header = cb_on_chunk_header # <<<<<<<<<<<<<< + * self._csettings.on_chunk_complete = cb_on_chunk_complete + * + */ + __pyx_v_self->_csettings->on_chunk_header = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header; + + /* "aiohttp/_http_parser.pyx":350 + * self._csettings.on_message_complete = cb_on_message_complete + * self._csettings.on_chunk_header = cb_on_chunk_header + * self._csettings.on_chunk_complete = cb_on_chunk_complete # <<<<<<<<<<<<<< + * + * self._last_error = None + */ + __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete; + + /* "aiohttp/_http_parser.pyx":352 + * self._csettings.on_chunk_complete = cb_on_chunk_complete + * + * self._last_error = None # <<<<<<<<<<<<<< + * + * cdef _process_header(self): + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_error); + __Pyx_DECREF(__pyx_v_self->_last_error); + __pyx_v_self->_last_error = Py_None; + + /* "aiohttp/_http_parser.pyx":308 + * PyMem_Free(self._csettings) + * + * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< + * object protocol, object loop, object timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._init", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":354 + * self._last_error = None + * + * cdef _process_header(self): # <<<<<<<<<<<<<< + * if self._raw_name: + * raw_name = bytes(self._raw_name) + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_v_raw_name = NULL; + PyObject *__pyx_v_raw_value = NULL; + PyObject *__pyx_v_name = NULL; + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + __Pyx_RefNannySetupContext("_process_header", 0); + + /* "aiohttp/_http_parser.pyx":355 + * + * cdef _process_header(self): + * if self._raw_name: # <<<<<<<<<<<<<< + * raw_name = bytes(self._raw_name) + * raw_value = bytes(self._raw_value) + */ + __pyx_t_1 = (__pyx_v_self->_raw_name != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->_raw_name) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":356 + * cdef _process_header(self): + * if self._raw_name: + * raw_name = bytes(self._raw_name) # <<<<<<<<<<<<<< + * raw_value = bytes(self._raw_value) + * + */ + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 356, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_raw_name = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":357 + * if self._raw_name: + * raw_name = bytes(self._raw_name) + * raw_value = bytes(self._raw_value) # <<<<<<<<<<<<<< + * + * name = find_header(raw_name) + */ + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 357, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_raw_value = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":359 + * raw_value = bytes(self._raw_value) + * + * name = find_header(raw_name) # <<<<<<<<<<<<<< + * value = raw_value.decode('utf-8', 'surrogateescape') + * + */ + __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser_find_header(__pyx_v_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_name = __pyx_t_2; + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":360 + * + * name = find_header(raw_name) + * value = raw_value.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * + * self._headers.add(name, value) + */ + __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_raw_value, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 360, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_value = __pyx_t_2; + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":362 + * value = raw_value.decode('utf-8', 'surrogateescape') + * + * self._headers.add(name, value) # <<<<<<<<<<<<<< + * + * if name is CONTENT_ENCODING: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_headers, __pyx_n_s_add); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + 
__Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_name, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_name, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_name); + __Pyx_GIVEREF(__pyx_v_name); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_name); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_value); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":364 + * self._headers.add(name, value) + * + * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< + * self._content_encoding = value + * + */ + __pyx_t_1 = (__pyx_v_name == __pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); + __pyx_t_7 = (__pyx_t_1 != 0); + if (__pyx_t_7) { + + /* "aiohttp/_http_parser.pyx":365 + * + * if name is CONTENT_ENCODING: + * self._content_encoding = value # <<<<<<<<<<<<<< + * + * PyByteArray_Resize(self._raw_name, 0) + */ + if (!(likely(PyUnicode_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 365, __pyx_L1_error) + __pyx_t_2 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_content_encoding); + __Pyx_DECREF(__pyx_v_self->_content_encoding); + __pyx_v_self->_content_encoding = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":364 + * self._headers.add(name, value) + * + * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< + * self._content_encoding = value + * + */ + } + + /* "aiohttp/_http_parser.pyx":367 + * self._content_encoding = value + * + * PyByteArray_Resize(self._raw_name, 0) # <<<<<<<<<<<<<< + * PyByteArray_Resize(self._raw_value, 0) + * self._has_value = False + */ + __pyx_t_2 = __pyx_v_self->_raw_name; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 367, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":368 + * + * PyByteArray_Resize(self._raw_name, 0) + * PyByteArray_Resize(self._raw_value, 0) # <<<<<<<<<<<<<< + * self._has_value = False + * self._raw_headers.append((raw_name, raw_value)) + */ + __pyx_t_2 = __pyx_v_self->_raw_value; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) 
__PYX_ERR(0, 368, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":369 + * PyByteArray_Resize(self._raw_name, 0) + * PyByteArray_Resize(self._raw_value, 0) + * self._has_value = False # <<<<<<<<<<<<<< + * self._raw_headers.append((raw_name, raw_value)) + * + */ + __pyx_v_self->_has_value = 0; + + /* "aiohttp/_http_parser.pyx":370 + * PyByteArray_Resize(self._raw_value, 0) + * self._has_value = False + * self._raw_headers.append((raw_name, raw_value)) # <<<<<<<<<<<<<< + * + * cdef _on_header_field(self, char* at, size_t length): + */ + if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 370, __pyx_L1_error) + } + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 370, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_raw_name); + __Pyx_GIVEREF(__pyx_v_raw_name); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_raw_name); + __Pyx_INCREF(__pyx_v_raw_value); + __Pyx_GIVEREF(__pyx_v_raw_value); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_raw_value); + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_self->_raw_headers, __pyx_t_2); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 370, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":355 + * + * cdef _process_header(self): + * if self._raw_name: # <<<<<<<<<<<<<< + * raw_name = bytes(self._raw_name) + * raw_value = bytes(self._raw_value) + */ + } + + /* "aiohttp/_http_parser.pyx":354 + * self._last_error = None + * + * cdef _process_header(self): # <<<<<<<<<<<<<< + * if self._raw_name: + * raw_name = bytes(self._raw_name) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._process_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_raw_name); + __Pyx_XDECREF(__pyx_v_raw_value); + __Pyx_XDECREF(__pyx_v_name); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":372 + * self._raw_headers.append((raw_name, raw_value)) + * + * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length) { + Py_ssize_t __pyx_v_size; + char *__pyx_v_buf; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + __Pyx_RefNannySetupContext("_on_header_field", 0); + + /* "aiohttp/_http_parser.pyx":375 + * cdef Py_ssize_t size + * cdef char *buf + * if self._has_value: # <<<<<<<<<<<<<< + * self._process_header() + * + */ + __pyx_t_1 = (__pyx_v_self->_has_value != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":376 + * cdef char *buf + * if self._has_value: + * self._process_header() # <<<<<<<<<<<<<< + * + * size = PyByteArray_Size(self._raw_name) + */ + __pyx_t_2 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 376, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":375 + * cdef Py_ssize_t size + * cdef char *buf + * if self._has_value: # <<<<<<<<<<<<<< + * self._process_header() + * + */ + } + + /* "aiohttp/_http_parser.pyx":378 + * self._process_header() + * + * size = PyByteArray_Size(self._raw_name) # <<<<<<<<<<<<<< + * PyByteArray_Resize(self._raw_name, size + length) + * buf = PyByteArray_AsString(self._raw_name) + */ + __pyx_t_2 = __pyx_v_self->_raw_name; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_3 = PyByteArray_Size(__pyx_t_2); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_size = __pyx_t_3; + + /* "aiohttp/_http_parser.pyx":379 + * + * size = PyByteArray_Size(self._raw_name) + * PyByteArray_Resize(self._raw_name, size + length) # <<<<<<<<<<<<<< + * buf = PyByteArray_AsString(self._raw_name) + * memcpy(buf + size, at, length) + */ + __pyx_t_2 = __pyx_v_self->_raw_name; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_4 = PyByteArray_Resize(__pyx_t_2, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 379, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":380 + * size = PyByteArray_Size(self._raw_name) + * PyByteArray_Resize(self._raw_name, size + length) + * buf = PyByteArray_AsString(self._raw_name) # <<<<<<<<<<<<<< + * memcpy(buf + size, at, length) + * + */ + __pyx_t_2 = __pyx_v_self->_raw_name; + __Pyx_INCREF(__pyx_t_2); + __pyx_v_buf = PyByteArray_AsString(__pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":381 + * PyByteArray_Resize(self._raw_name, size + length) + * buf = PyByteArray_AsString(self._raw_name) + * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< + * + * cdef _on_header_value(self, char* at, size_t length): + */ + (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); + + /* "aiohttp/_http_parser.pyx":372 + * self._raw_headers.append((raw_name, raw_value)) + * + * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":383 + * memcpy(buf + size, at, length) + * + * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length) { + Py_ssize_t __pyx_v_size; + char *__pyx_v_buf; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + __Pyx_RefNannySetupContext("_on_header_value", 0); + + /* "aiohttp/_http_parser.pyx":387 + * cdef char *buf + * + * size = PyByteArray_Size(self._raw_value) # <<<<<<<<<<<<<< + * PyByteArray_Resize(self._raw_value, size + length) + * buf = PyByteArray_AsString(self._raw_value) + */ + __pyx_t_1 = __pyx_v_self->_raw_value; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1L))) 
__PYX_ERR(0, 387, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_size = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":388 + * + * size = PyByteArray_Size(self._raw_value) + * PyByteArray_Resize(self._raw_value, size + length) # <<<<<<<<<<<<<< + * buf = PyByteArray_AsString(self._raw_value) + * memcpy(buf + size, at, length) + */ + __pyx_t_1 = __pyx_v_self->_raw_value; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 388, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":389 + * size = PyByteArray_Size(self._raw_value) + * PyByteArray_Resize(self._raw_value, size + length) + * buf = PyByteArray_AsString(self._raw_value) # <<<<<<<<<<<<<< + * memcpy(buf + size, at, length) + * self._has_value = True + */ + __pyx_t_1 = __pyx_v_self->_raw_value; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_buf = PyByteArray_AsString(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":390 + * PyByteArray_Resize(self._raw_value, size + length) + * buf = PyByteArray_AsString(self._raw_value) + * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< + * self._has_value = True + * + */ + (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); + + /* "aiohttp/_http_parser.pyx":391 + * buf = PyByteArray_AsString(self._raw_value) + * memcpy(buf + size, at, length) + * self._has_value = True # <<<<<<<<<<<<<< + * + * cdef _on_headers_complete(self): + */ + __pyx_v_self->_has_value = 1; + + /* "aiohttp/_http_parser.pyx":383 + * memcpy(buf + size, at, length) + * + * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * cdef char *buf + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":393 + * self._has_value = True + * + * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< + * self._process_header() + * + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_v_method = NULL; + int __pyx_v_should_close; + unsigned int __pyx_v_upgrade; + unsigned int __pyx_v_chunked; + PyObject *__pyx_v_raw_headers = NULL; + PyObject *__pyx_v_headers = NULL; + PyObject *__pyx_v_encoding = NULL; + PyObject *__pyx_v_enc = NULL; + PyObject *__pyx_v_msg = NULL; + PyObject *__pyx_v_payload = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + unsigned int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + __Pyx_RefNannySetupContext("_on_headers_complete", 0); + + /* "aiohttp/_http_parser.pyx":394 + * + * cdef _on_headers_complete(self): + * self._process_header() # <<<<<<<<<<<<<< + * + * method = http_method_str(self._cparser.method) + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 394, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":396 + * self._process_header() + * + * method = http_method_str(self._cparser.method) # <<<<<<<<<<<<<< + * should_close = not cparser.http_should_keep_alive(self._cparser) + * upgrade = self._cparser.upgrade + */ + __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser_http_method_str(__pyx_v_self->_cparser->method); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 396, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_method = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":397 + * + * method = http_method_str(self._cparser.method) + * should_close = not cparser.http_should_keep_alive(self._cparser) # <<<<<<<<<<<<<< + * upgrade = self._cparser.upgrade + * chunked = self._cparser.flags & cparser.F_CHUNKED + */ + __pyx_v_should_close = (!(http_should_keep_alive(__pyx_v_self->_cparser) != 0)); + + /* "aiohttp/_http_parser.pyx":398 + * method = http_method_str(self._cparser.method) + * should_close = not cparser.http_should_keep_alive(self._cparser) + * upgrade = self._cparser.upgrade # <<<<<<<<<<<<<< + * chunked = self._cparser.flags & cparser.F_CHUNKED + * + */ + __pyx_t_2 = __pyx_v_self->_cparser->upgrade; + __pyx_v_upgrade = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":399 + * should_close = not cparser.http_should_keep_alive(self._cparser) + * upgrade = self._cparser.upgrade + * chunked = self._cparser.flags & cparser.F_CHUNKED # <<<<<<<<<<<<<< + * + * raw_headers = tuple(self._raw_headers) + */ + __pyx_v_chunked = (__pyx_v_self->_cparser->flags & F_CHUNKED); + + /* "aiohttp/_http_parser.pyx":401 + * chunked = self._cparser.flags & cparser.F_CHUNKED + * + * raw_headers = tuple(self._raw_headers) # <<<<<<<<<<<<<< + * headers = CIMultiDictProxy(self._headers) + * + */ + if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 401, __pyx_L1_error) + } + __pyx_t_1 = PyList_AsTuple(__pyx_v_self->_raw_headers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_raw_headers = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":402 + * + * raw_headers = tuple(self._raw_headers) + * headers = CIMultiDictProxy(self._headers) # <<<<<<<<<<<<<< + * + * if upgrade or self._cparser.method == 5: # cparser.CONNECT: + */ + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); + __pyx_t_3 = __pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy; __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_self->_headers) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_self->_headers); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 402, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_headers = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":404 + * headers = CIMultiDictProxy(self._headers) + * + * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< + * self._upgraded = True + * + */ + __pyx_t_6 = (__pyx_v_upgrade != 0); + if (!__pyx_t_6) { + } else { + __pyx_t_5 = __pyx_t_6; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_6 = ((__pyx_v_self->_cparser->method == 5) != 0); + __pyx_t_5 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_5) { + + /* "aiohttp/_http_parser.pyx":405 + * + * if upgrade or self._cparser.method == 5: # cparser.CONNECT: + * self._upgraded = True # <<<<<<<<<<<<<< + * + * # do not support old websocket spec + */ + __pyx_v_self->_upgraded = 1; + + /* "aiohttp/_http_parser.pyx":404 + * headers = CIMultiDictProxy(self._headers) + * + * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< + * self._upgraded = True + * + */ + } + + /* "aiohttp/_http_parser.pyx":408 + * + * # do not support old websocket spec + * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< + * raise InvalidHeader(SEC_WEBSOCKET_KEY1) + * + */ + __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_v_headers, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 408, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + if (unlikely(__pyx_t_6)) { + + /* "aiohttp/_http_parser.pyx":409 + * # do not support old websocket spec + * if SEC_WEBSOCKET_KEY1 in headers: + * raise InvalidHeader(SEC_WEBSOCKET_KEY1) # <<<<<<<<<<<<<< + * + * encoding = None + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 409, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 409, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 409, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":408 + * + * # do not support old websocket spec + * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< + * raise InvalidHeader(SEC_WEBSOCKET_KEY1) + * + */ + } + + /* "aiohttp/_http_parser.pyx":411 + * raise InvalidHeader(SEC_WEBSOCKET_KEY1) + * + * encoding = None # <<<<<<<<<<<<<< + * enc = self._content_encoding + * if enc is not None: + */ + __Pyx_INCREF(Py_None); + __pyx_v_encoding = Py_None; + + /* "aiohttp/_http_parser.pyx":412 + * + * encoding = None + * enc = self._content_encoding # <<<<<<<<<<<<<< + * if enc is not None: + * self._content_encoding = None + */ + __pyx_t_1 = __pyx_v_self->_content_encoding; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_enc = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":413 + * encoding = None + * enc = self._content_encoding + * if enc is not None: # <<<<<<<<<<<<<< + * self._content_encoding = None + * enc = enc.lower() + */ + __pyx_t_6 = (__pyx_v_enc != Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { + + /* "aiohttp/_http_parser.pyx":414 + * enc = self._content_encoding + * if enc is not None: + * self._content_encoding = None # <<<<<<<<<<<<<< + * enc = enc.lower() + * if enc in ('gzip', 'deflate', 'br'): + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_content_encoding); + __Pyx_DECREF(__pyx_v_self->_content_encoding); + __pyx_v_self->_content_encoding = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":415 + * if enc is not None: + * self._content_encoding = None + * enc = enc.lower() # <<<<<<<<<<<<<< + * if enc in ('gzip', 'deflate', 'br'): + * encoding = enc + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_enc, __pyx_n_s_lower); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_enc, __pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":416 + * self._content_encoding = None + * enc = enc.lower() + * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< + * encoding = enc + * + */ + __Pyx_INCREF(__pyx_v_enc); + __pyx_t_1 = __pyx_v_enc; + __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_gzip, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 416, __pyx_L1_error) + if (!__pyx_t_6) { + } else { + __pyx_t_5 = __pyx_t_6; + goto __pyx_L9_bool_binop_done; + } + __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_deflate, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 416, __pyx_L1_error) + if (!__pyx_t_6) { + } else { + __pyx_t_5 = __pyx_t_6; + goto __pyx_L9_bool_binop_done; + } + __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_br, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 416, __pyx_L1_error) + __pyx_t_5 = __pyx_t_6; + __pyx_L9_bool_binop_done:; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":417 + * enc = enc.lower() + * if enc in ('gzip', 'deflate', 'br'): + * encoding = enc # <<<<<<<<<<<<<< + * + * if self._cparser.type == cparser.HTTP_REQUEST: + */ + __Pyx_INCREF(__pyx_v_enc); + __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_v_enc); + + /* "aiohttp/_http_parser.pyx":416 + * self._content_encoding = None + * enc = enc.lower() + * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< + * encoding = enc + * + */ + } + + /* "aiohttp/_http_parser.pyx":413 + * encoding = None + * enc = self._content_encoding + * if enc is not None: # <<<<<<<<<<<<<< + * self._content_encoding = None + * enc = enc.lower() + */ + } + + /* "aiohttp/_http_parser.pyx":419 + * encoding = enc + * + * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * msg = _new_request_message( + * method, self._path, + */ + __pyx_t_6 = ((__pyx_v_self->_cparser->type == HTTP_REQUEST) != 0); + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":421 + * if self._cparser.type == cparser.HTTP_REQUEST: + * msg = _new_request_message( + * method, self._path, # <<<<<<<<<<<<<< + * self.http_version(), headers, raw_headers, + * should_close, encoding, upgrade, chunked, self._url) + */ + __pyx_t_1 = __pyx_v_self->_path; + __Pyx_INCREF(__pyx_t_1); + + /* "aiohttp/_http_parser.pyx":422 + * msg = _new_request_message( + * method, self._path, + * self.http_version(), headers, raw_headers, # <<<<<<<<<<<<<< + * should_close, encoding, upgrade, chunked, self._url) + * else: + */ + __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 422, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + + /* "aiohttp/_http_parser.pyx":423 + * method, self._path, + * self.http_version(), headers, raw_headers, + * should_close, encoding, upgrade, chunked, self._url) # <<<<<<<<<<<<<< + * else: + * msg = _new_response_message( + */ + __pyx_t_4 = __pyx_v_self->_url; + __Pyx_INCREF(__pyx_t_4); + + /* "aiohttp/_http_parser.pyx":420 + * + * if self._cparser.type == cparser.HTTP_REQUEST: + * msg = _new_request_message( # <<<<<<<<<<<<<< + * method, self._path, + * self.http_version(), headers, raw_headers, + */ + __pyx_t_7 = 
__pyx_f_7aiohttp_12_http_parser__new_request_message(__pyx_v_method, ((PyObject*)__pyx_t_1), __pyx_t_3, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_msg = __pyx_t_7; + __pyx_t_7 = 0; + + /* "aiohttp/_http_parser.pyx":419 + * encoding = enc + * + * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< + * msg = _new_request_message( + * method, self._path, + */ + goto __pyx_L12; + } + + /* "aiohttp/_http_parser.pyx":425 + * should_close, encoding, upgrade, chunked, self._url) + * else: + * msg = _new_response_message( # <<<<<<<<<<<<<< + * self.http_version(), self._cparser.status_code, self._reason, + * headers, raw_headers, should_close, encoding, + */ + /*else*/ { + + /* "aiohttp/_http_parser.pyx":426 + * else: + * msg = _new_response_message( + * self.http_version(), self._cparser.status_code, self._reason, # <<<<<<<<<<<<<< + * headers, raw_headers, should_close, encoding, + * upgrade, chunked) + */ + __pyx_t_7 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = __pyx_v_self->_reason; + __Pyx_INCREF(__pyx_t_4); + + /* "aiohttp/_http_parser.pyx":425 + * should_close, encoding, upgrade, chunked, self._url) + * else: + * msg = _new_response_message( # <<<<<<<<<<<<<< + * self.http_version(), self._cparser.status_code, self._reason, + * headers, raw_headers, should_close, encoding, + */ + __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser__new_response_message(__pyx_t_7, __pyx_v_self->_cparser->status_code, ((PyObject*)__pyx_t_4), __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 425, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_msg = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_L12:; + + /* "aiohttp/_http_parser.pyx":430 + * upgrade, chunked) + * + * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( + */ + __pyx_t_5 = ((__pyx_v_self->_cparser->content_length > 0) != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_6 = __pyx_t_5; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_5 = (__pyx_v_chunked != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_6 = __pyx_t_5; + goto __pyx_L14_bool_binop_done; + } + + /* "aiohttp/_http_parser.pyx":431 + * + * if (self._cparser.content_length > 0 or chunked or + * self._cparser.method == 5): # CONNECT: 5 # <<<<<<<<<<<<<< + * payload = StreamReader( + * self._protocol, timer=self._timer, loop=self._loop) + */ + __pyx_t_5 = ((__pyx_v_self->_cparser->method == 5) != 0); + __pyx_t_6 = __pyx_t_5; + __pyx_L14_bool_binop_done:; + + /* "aiohttp/_http_parser.pyx":430 + * upgrade, chunked) + * + * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( + */ + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":432 + * if (self._cparser.content_length > 0 or chunked or + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( # <<<<<<<<<<<<<< + * self._protocol, timer=self._timer, 
loop=self._loop) + * else: + */ + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_self->_protocol); + __Pyx_GIVEREF(__pyx_v_self->_protocol); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->_protocol); + + /* "aiohttp/_http_parser.pyx":433 + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( + * self._protocol, timer=self._timer, loop=self._loop) # <<<<<<<<<<<<<< + * else: + * payload = EMPTY_PAYLOAD + */ + __pyx_t_4 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_timer, __pyx_v_self->_timer) < 0) __PYX_ERR(0, 433, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_loop, __pyx_v_self->_loop) < 0) __PYX_ERR(0, 433, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":432 + * if (self._cparser.content_length > 0 or chunked or + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( # <<<<<<<<<<<<<< + * self._protocol, timer=self._timer, loop=self._loop) + * else: + */ + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_payload = __pyx_t_7; + __pyx_t_7 = 0; + + /* "aiohttp/_http_parser.pyx":430 + * upgrade, chunked) + * + * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< + * self._cparser.method == 5): # CONNECT: 5 + * payload = StreamReader( + */ + goto __pyx_L13; + } + + /* "aiohttp/_http_parser.pyx":435 + * self._protocol, timer=self._timer, loop=self._loop) + * else: + * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< + * + * self._payload = payload + */ + /*else*/ { + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); + __pyx_v_payload = __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD; + } + __pyx_L13:; + + /* "aiohttp/_http_parser.pyx":437 + * payload = EMPTY_PAYLOAD + * + * self._payload = payload # <<<<<<<<<<<<<< + * if encoding is not None and self._auto_decompress: + * self._payload = DeflateBuffer(payload, encoding) + */ + __Pyx_INCREF(__pyx_v_payload); + __Pyx_GIVEREF(__pyx_v_payload); + __Pyx_GOTREF(__pyx_v_self->_payload); + __Pyx_DECREF(__pyx_v_self->_payload); + __pyx_v_self->_payload = __pyx_v_payload; + + /* "aiohttp/_http_parser.pyx":438 + * + * self._payload = payload + * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< + * self._payload = DeflateBuffer(payload, encoding) + * + */ + __pyx_t_5 = (__pyx_v_encoding != Py_None); + __pyx_t_8 = (__pyx_t_5 != 0); + if (__pyx_t_8) { + } else { + __pyx_t_6 = __pyx_t_8; + goto __pyx_L18_bool_binop_done; + } + __pyx_t_8 = (__pyx_v_self->_auto_decompress != 0); + __pyx_t_6 = __pyx_t_8; + __pyx_L18_bool_binop_done:; + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":439 + * self._payload = payload + * if encoding is not None and self._auto_decompress: + * self._payload = DeflateBuffer(payload, encoding) # <<<<<<<<<<<<<< + * + * if not self._response_with_body: + */ + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); + __pyx_t_4 = __pyx_v_7aiohttp_12_http_parser_DeflateBuffer; __pyx_t_3 = NULL; + __pyx_t_9 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + 
__Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_9 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 439, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 439, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 439, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_payload); + __Pyx_GIVEREF(__pyx_v_payload); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_9, __pyx_v_payload); + __Pyx_INCREF(__pyx_v_encoding); + __Pyx_GIVEREF(__pyx_v_encoding); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_9, __pyx_v_encoding); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 439, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GIVEREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_v_self->_payload); + __Pyx_DECREF(__pyx_v_self->_payload); + __pyx_v_self->_payload = __pyx_t_7; + __pyx_t_7 = 0; + + /* "aiohttp/_http_parser.pyx":438 + * + * self._payload = payload + * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< + * self._payload = DeflateBuffer(payload, encoding) + * + */ + } + + /* "aiohttp/_http_parser.pyx":441 + * self._payload = DeflateBuffer(payload, encoding) + * + * if not self._response_with_body: # <<<<<<<<<<<<<< + * payload = EMPTY_PAYLOAD + * + */ + __pyx_t_6 = ((!(__pyx_v_self->_response_with_body != 0)) != 0); + if (__pyx_t_6) { + + /* "aiohttp/_http_parser.pyx":442 + * + * if not self._response_with_body: + * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< + * + * self._messages.append((msg, payload)) + */ + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); + __Pyx_DECREF_SET(__pyx_v_payload, __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); + + /* "aiohttp/_http_parser.pyx":441 + * self._payload = DeflateBuffer(payload, encoding) + * + * if not self._response_with_body: # <<<<<<<<<<<<<< + * payload = EMPTY_PAYLOAD + * + */ + } + + /* "aiohttp/_http_parser.pyx":444 + * payload = EMPTY_PAYLOAD + * + * self._messages.append((msg, payload)) # <<<<<<<<<<<<<< + * + * cdef _on_message_complete(self): + */ + if (unlikely(__pyx_v_self->_messages == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 444, __pyx_L1_error) + } + __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 444, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_v_msg); + __Pyx_GIVEREF(__pyx_v_msg); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_msg); + __Pyx_INCREF(__pyx_v_payload); + __Pyx_GIVEREF(__pyx_v_payload); + PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_v_payload); + __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_self->_messages, __pyx_t_7); if 
(unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 444, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "aiohttp/_http_parser.pyx":393 + * self._has_value = True + * + * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< + * self._process_header() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_method); + __Pyx_XDECREF(__pyx_v_raw_headers); + __Pyx_XDECREF(__pyx_v_headers); + __Pyx_XDECREF(__pyx_v_encoding); + __Pyx_XDECREF(__pyx_v_enc); + __Pyx_XDECREF(__pyx_v_msg); + __Pyx_XDECREF(__pyx_v_payload); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":446 + * self._messages.append((msg, payload)) + * + * cdef _on_message_complete(self): # <<<<<<<<<<<<<< + * self._payload.feed_eof() + * self._payload = None + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("_on_message_complete", 0); + + /* "aiohttp/_http_parser.pyx":447 + * + * cdef _on_message_complete(self): + * self._payload.feed_eof() # <<<<<<<<<<<<<< + * self._payload = None + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 447, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 447, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":448 + * cdef _on_message_complete(self): + * self._payload.feed_eof() + * self._payload = None # <<<<<<<<<<<<<< + * + * cdef _on_chunk_header(self): + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_payload); + __Pyx_DECREF(__pyx_v_self->_payload); + __pyx_v_self->_payload = Py_None; + + /* "aiohttp/_http_parser.pyx":446 + * self._messages.append((msg, payload)) + * + * cdef _on_message_complete(self): # <<<<<<<<<<<<<< + * self._payload.feed_eof() + * self._payload = None + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":450 + * self._payload = None + * + * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< + * self._payload.begin_http_chunk_receiving() + * + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("_on_chunk_header", 0); + + /* "aiohttp/_http_parser.pyx":451 + * + * cdef _on_chunk_header(self): + * self._payload.begin_http_chunk_receiving() # <<<<<<<<<<<<<< + * + * cdef _on_chunk_complete(self): + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_begin_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 451, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 451, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":450 + * self._payload = None + * + * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< + * self._payload.begin_http_chunk_receiving() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":453 + * self._payload.begin_http_chunk_receiving() + * + * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< + * self._payload.end_http_chunk_receiving() + * + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("_on_chunk_complete", 0); + + /* "aiohttp/_http_parser.pyx":454 + * + * cdef _on_chunk_complete(self): + * self._payload.end_http_chunk_receiving() # <<<<<<<<<<<<<< + * + * cdef object _on_status_complete(self): + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_end_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 454, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 454, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":453 + * self._payload.begin_http_chunk_receiving() + * + * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< + * self._payload.end_http_chunk_receiving() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":456 + * self._payload.end_http_chunk_receiving() + * + * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< + * pass + * + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_on_status_complete", 0); + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":459 + * pass + * + * cdef inline http_version(self): # <<<<<<<<<<<<<< + * cdef cparser.http_parser* parser = self._cparser + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + struct http_parser *__pyx_v_parser; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + struct http_parser *__pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + __Pyx_RefNannySetupContext("http_version", 0); + + /* "aiohttp/_http_parser.pyx":460 + * + * cdef inline http_version(self): + * cdef cparser.http_parser* parser = self._cparser # <<<<<<<<<<<<<< + * + * if parser.http_major == 1: + */ + __pyx_t_1 = __pyx_v_self->_cparser; + __pyx_v_parser = __pyx_t_1; + + /* "aiohttp/_http_parser.pyx":462 + * cdef cparser.http_parser* parser = self._cparser + * + * if parser.http_major == 1: # <<<<<<<<<<<<<< + * if parser.http_minor == 0: + * return HttpVersion10 + */ + __pyx_t_2 = ((__pyx_v_parser->http_major == 1) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":463 + * + * if parser.http_major == 1: + * if parser.http_minor == 0: # <<<<<<<<<<<<<< + * return HttpVersion10 + * elif parser.http_minor == 1: + */ + switch (__pyx_v_parser->http_minor) { + case 0: + + /* "aiohttp/_http_parser.pyx":464 + * if parser.http_major == 1: + * if parser.http_minor == 0: + * return HttpVersion10 # <<<<<<<<<<<<<< + * elif parser.http_minor == 1: + * return HttpVersion11 + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion10); + __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion10; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":463 + * + * if parser.http_major == 1: + * if parser.http_minor == 0: # <<<<<<<<<<<<<< + * return HttpVersion10 + * elif parser.http_minor == 1: 
+ */ + break; + case 1: + + /* "aiohttp/_http_parser.pyx":466 + * return HttpVersion10 + * elif parser.http_minor == 1: + * return HttpVersion11 # <<<<<<<<<<<<<< + * + * return HttpVersion(parser.http_major, parser.http_minor) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion11); + __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion11; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":465 + * if parser.http_minor == 0: + * return HttpVersion10 + * elif parser.http_minor == 1: # <<<<<<<<<<<<<< + * return HttpVersion11 + * + */ + break; + default: break; + } + + /* "aiohttp/_http_parser.pyx":462 + * cdef cparser.http_parser* parser = self._cparser + * + * if parser.http_major == 1: # <<<<<<<<<<<<<< + * if parser.http_minor == 0: + * return HttpVersion10 + */ + } + + /* "aiohttp/_http_parser.pyx":468 + * return HttpVersion11 + * + * return HttpVersion(parser.http_major, parser.http_minor) # <<<<<<<<<<<<<< + * + * ### Public API ### + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); + __pyx_t_6 = __pyx_v_7aiohttp_12_http_parser_HttpVersion; __pyx_t_7 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_4, __pyx_t_5}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_4, __pyx_t_5}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_5); + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":459 + * pass + * + * cdef inline http_version(self): # 
<<<<<<<<<<<<<< + * cdef cparser.http_parser* parser = self._cparser + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.http_version", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":472 + * ### Public API ### + * + * def feed_eof(self): # <<<<<<<<<<<<<< + * cdef bytes desc + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("feed_eof (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_v_desc = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("feed_eof", 0); + + /* "aiohttp/_http_parser.pyx":475 + * cdef bytes desc + * + * if self._payload is not None: # <<<<<<<<<<<<<< + * if self._cparser.flags & cparser.F_CHUNKED: + * raise TransferEncodingError( + */ + __pyx_t_1 = (__pyx_v_self->_payload != Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":476 + * + * if self._payload is not None: + * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< + * raise TransferEncodingError( + * "Not enough data for satisfy transfer length header.") + */ + __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CHUNKED) != 0); + if (unlikely(__pyx_t_2)) { + + /* "aiohttp/_http_parser.pyx":477 + * if self._payload is not None: + * if self._cparser.flags & cparser.F_CHUNKED: + * raise TransferEncodingError( # <<<<<<<<<<<<<< + * "Not enough data for satisfy transfer length header.") + * elif self._cparser.flags & cparser.F_CONTENTLENGTH: + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 477, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_u_Not_enough_data_for_satisfy_tran) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_tran); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 477, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 477, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":476 + * + * if self._payload is not None: + * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< + * raise TransferEncodingError( + * "Not enough data for satisfy transfer length header.") + */ + } + + /* "aiohttp/_http_parser.pyx":479 + * raise TransferEncodingError( + * "Not enough data for satisfy transfer length header.") + * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< + * raise ContentLengthError( + * "Not enough data for satisfy content length header.") + */ + __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CONTENTLENGTH) != 0); + if (unlikely(__pyx_t_2)) { + + /* "aiohttp/_http_parser.pyx":480 + * "Not enough data for satisfy transfer length header.") + * elif self._cparser.flags & cparser.F_CONTENTLENGTH: + * raise ContentLengthError( # <<<<<<<<<<<<<< + * "Not enough data for satisfy content length header.") + * elif self._cparser.http_errno != cparser.HPE_OK: + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 480, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_u_Not_enough_data_for_satisfy_cont) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_cont); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 480, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 480, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":479 + * raise TransferEncodingError( + * "Not enough data for satisfy transfer length header.") + * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< + * raise ContentLengthError( + * "Not enough data for satisfy content length header.") + */ + } + + /* "aiohttp/_http_parser.pyx":482 + * raise ContentLengthError( + * "Not enough data for satisfy content length header.") + * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< + * desc = cparser.http_errno_description( + * self._cparser.http_errno) + */ + __pyx_t_2 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); + if (unlikely(__pyx_t_2)) { + + /* "aiohttp/_http_parser.pyx":483 + * "Not enough data for satisfy content length header.") + * elif self._cparser.http_errno != cparser.HPE_OK: + * desc = cparser.http_errno_description( # <<<<<<<<<<<<<< + * self._cparser.http_errno) + * raise PayloadEncodingError(desc.decode('latin-1')) + */ + __pyx_t_3 = __Pyx_PyBytes_FromString(http_errno_description(((enum http_errno)__pyx_v_self->_cparser->http_errno))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 483, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_desc = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":485 + * desc = cparser.http_errno_description( + * self._cparser.http_errno) + * raise PayloadEncodingError(desc.decode('latin-1')) # <<<<<<<<<<<<<< + * else: + * self._payload.feed_eof() + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 485, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 485, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 485, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 485, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":482 + * raise ContentLengthError( + * "Not enough data for satisfy content length header.") + * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< + * desc = cparser.http_errno_description( + * self._cparser.http_errno) + */ + } + + /* "aiohttp/_http_parser.pyx":487 + * raise PayloadEncodingError(desc.decode('latin-1')) + * else: + * self._payload.feed_eof() # <<<<<<<<<<<<<< + * elif self._started: + * self._on_headers_complete() + */ + /*else*/ { + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 487, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 487, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + + /* "aiohttp/_http_parser.pyx":475 + * cdef bytes desc + * + * if self._payload is not None: # <<<<<<<<<<<<<< + * if self._cparser.flags & cparser.F_CHUNKED: + * raise TransferEncodingError( + */ + goto __pyx_L3; + } + + /* "aiohttp/_http_parser.pyx":488 + * else: + * self._payload.feed_eof() + * elif self._started: # <<<<<<<<<<<<<< + * self._on_headers_complete() + * if self._messages: + */ + __pyx_t_2 = (__pyx_v_self->_started != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":489 + * self._payload.feed_eof() + * elif self._started: + * self._on_headers_complete() # <<<<<<<<<<<<<< + * if self._messages: + * return self._messages[-1][0] + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_on_headers_complete(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 489, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":490 + * elif self._started: + * self._on_headers_complete() + * if self._messages: # <<<<<<<<<<<<<< + * return self._messages[-1][0] + * + */ + __pyx_t_2 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":491 + * self._on_headers_complete() + * if self._messages: + * return self._messages[-1][0] # <<<<<<<<<<<<<< + * + * def feed_data(self, data): + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_messages == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 491, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_List(__pyx_v_self->_messages, -1L, long, 1, __Pyx_PyInt_From_long, 1, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 491, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 491, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":490 + * elif self._started: + * self._on_headers_complete() + * if self._messages: # <<<<<<<<<<<<<< + * return self._messages[-1][0] + * + */ + } + + /* "aiohttp/_http_parser.pyx":488 + * else: + * self._payload.feed_eof() + * elif self._started: # <<<<<<<<<<<<<< + * self._on_headers_complete() + * if self._messages: + */ + } + __pyx_L3:; + + /* "aiohttp/_http_parser.pyx":472 + * ### Public API ### + * + * def feed_eof(self): # <<<<<<<<<<<<<< + * cdef bytes desc + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_eof", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_desc); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":493 + * return self._messages[-1][0] + * + * def feed_data(self, data): # <<<<<<<<<<<<<< + * cdef: + * size_t data_len + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("feed_data (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_data)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data) { + size_t __pyx_v_data_len; + size_t __pyx_v_nb; + PyObject *__pyx_v_ex = NULL; + PyObject *__pyx_v_messages = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("feed_data", 0); + + /* "aiohttp/_http_parser.pyx":498 + * size_t nb + * + * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< + * data_len = self.py_buf.len + * + */ + __pyx_t_1 = PyObject_GetBuffer(__pyx_v_data, (&__pyx_v_self->py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 498, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":499 + * + * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + * data_len = self.py_buf.len # <<<<<<<<<<<<<< + * + * nb = cparser.http_parser_execute( + */ + __pyx_v_data_len = ((size_t)__pyx_v_self->py_buf.len); + + /* "aiohttp/_http_parser.pyx":501 + * data_len = self.py_buf.len + * + * nb = cparser.http_parser_execute( # <<<<<<<<<<<<<< + * self._cparser, + * self._csettings, + */ + __pyx_v_nb = http_parser_execute(__pyx_v_self->_cparser, __pyx_v_self->_csettings, ((char *)__pyx_v_self->py_buf.buf), __pyx_v_data_len); + + /* "aiohttp/_http_parser.pyx":507 + * data_len) + * + * 
PyBuffer_Release(&self.py_buf) # <<<<<<<<<<<<<< + * + * # i am not sure about cparser.HPE_INVALID_METHOD, + */ + PyBuffer_Release((&__pyx_v_self->py_buf)); + + /* "aiohttp/_http_parser.pyx":512 + * # seems get err for valid request + * # test_client_functional.py::test_post_data_with_bytesio_file + * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): + */ + __pyx_t_3 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); + if (__pyx_t_3) { + } else { + __pyx_t_2 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + + /* "aiohttp/_http_parser.pyx":513 + * # test_client_functional.py::test_post_data_with_bytesio_file + * if (self._cparser.http_errno != cparser.HPE_OK and + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or # <<<<<<<<<<<<<< + * self._cparser.method == 0)): + * if self._payload_error == 0: + */ + __pyx_t_3 = ((__pyx_v_self->_cparser->http_errno != HPE_INVALID_METHOD) != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_2 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + + /* "aiohttp/_http_parser.pyx":514 + * if (self._cparser.http_errno != cparser.HPE_OK and + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): # <<<<<<<<<<<<<< + * if self._payload_error == 0: + * if self._last_error is not None: + */ + __pyx_t_3 = ((__pyx_v_self->_cparser->method == 0) != 0); + __pyx_t_2 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + + /* "aiohttp/_http_parser.pyx":512 + * # seems get err for valid request + * # test_client_functional.py::test_post_data_with_bytesio_file + * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): + */ + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":515 + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): + * if self._payload_error == 0: # <<<<<<<<<<<<<< + * if self._last_error is not None: + * ex = self._last_error + */ + __pyx_t_2 = ((__pyx_v_self->_payload_error == 0) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":516 + * self._cparser.method == 0)): + * if self._payload_error == 0: + * if self._last_error is not None: # <<<<<<<<<<<<<< + * ex = self._last_error + * self._last_error = None + */ + __pyx_t_2 = (__pyx_v_self->_last_error != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "aiohttp/_http_parser.pyx":517 + * if self._payload_error == 0: + * if self._last_error is not None: + * ex = self._last_error # <<<<<<<<<<<<<< + * self._last_error = None + * else: + */ + __pyx_t_4 = __pyx_v_self->_last_error; + __Pyx_INCREF(__pyx_t_4); + __pyx_v_ex = __pyx_t_4; + __pyx_t_4 = 0; + + /* "aiohttp/_http_parser.pyx":518 + * if self._last_error is not None: + * ex = self._last_error + * self._last_error = None # <<<<<<<<<<<<<< + * else: + * ex = parser_error_from_errno( + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_error); + __Pyx_DECREF(__pyx_v_self->_last_error); + __pyx_v_self->_last_error = Py_None; + + /* "aiohttp/_http_parser.pyx":516 + * self._cparser.method == 0)): + * if self._payload_error == 0: + * if self._last_error is not None: # <<<<<<<<<<<<<< + * ex = self._last_error + * self._last_error = None + */ + goto __pyx_L8; + } + + /* "aiohttp/_http_parser.pyx":520 + * self._last_error = None + * else: + * ex = parser_error_from_errno( # <<<<<<<<<<<<<< + * 
self._cparser.http_errno) + * self._payload = None + */ + /*else*/ { + + /* "aiohttp/_http_parser.pyx":521 + * else: + * ex = parser_error_from_errno( + * self._cparser.http_errno) # <<<<<<<<<<<<<< + * self._payload = None + * raise ex + */ + __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(((enum http_errno)__pyx_v_self->_cparser->http_errno)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 520, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_ex = __pyx_t_4; + __pyx_t_4 = 0; + } + __pyx_L8:; + + /* "aiohttp/_http_parser.pyx":522 + * ex = parser_error_from_errno( + * self._cparser.http_errno) + * self._payload = None # <<<<<<<<<<<<<< + * raise ex + * + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_payload); + __Pyx_DECREF(__pyx_v_self->_payload); + __pyx_v_self->_payload = Py_None; + + /* "aiohttp/_http_parser.pyx":523 + * self._cparser.http_errno) + * self._payload = None + * raise ex # <<<<<<<<<<<<<< + * + * if self._messages: + */ + __Pyx_Raise(__pyx_v_ex, 0, 0, 0); + __PYX_ERR(0, 523, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":515 + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): + * if self._payload_error == 0: # <<<<<<<<<<<<<< + * if self._last_error is not None: + * ex = self._last_error + */ + } + + /* "aiohttp/_http_parser.pyx":512 + * # seems get err for valid request + * # test_client_functional.py::test_post_data_with_bytesio_file + * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< + * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + * self._cparser.method == 0)): + */ + } + + /* "aiohttp/_http_parser.pyx":525 + * raise ex + * + * if self._messages: # <<<<<<<<<<<<<< + * messages = self._messages + * self._messages = [] + */ + __pyx_t_3 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); + if (__pyx_t_3) { + + /* "aiohttp/_http_parser.pyx":526 + * + * if self._messages: + * messages = self._messages # <<<<<<<<<<<<<< + * self._messages = [] + * else: + */ + __pyx_t_4 = __pyx_v_self->_messages; + __Pyx_INCREF(__pyx_t_4); + __pyx_v_messages = __pyx_t_4; + __pyx_t_4 = 0; + + /* "aiohttp/_http_parser.pyx":527 + * if self._messages: + * messages = self._messages + * self._messages = [] # <<<<<<<<<<<<<< + * else: + * messages = () + */ + __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 527, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_v_self->_messages); + __Pyx_DECREF(__pyx_v_self->_messages); + __pyx_v_self->_messages = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "aiohttp/_http_parser.pyx":525 + * raise ex + * + * if self._messages: # <<<<<<<<<<<<<< + * messages = self._messages + * self._messages = [] + */ + goto __pyx_L9; + } + + /* "aiohttp/_http_parser.pyx":529 + * self._messages = [] + * else: + * messages = () # <<<<<<<<<<<<<< + * + * if self._upgraded: + */ + /*else*/ { + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_v_messages = __pyx_empty_tuple; + } + __pyx_L9:; + + /* "aiohttp/_http_parser.pyx":531 + * messages = () + * + * if self._upgraded: # <<<<<<<<<<<<<< + * return messages, True, data[nb:] + * else: + */ + __pyx_t_3 = (__pyx_v_self->_upgraded != 0); + if (__pyx_t_3) { + + /* "aiohttp/_http_parser.pyx":532 + * + * if self._upgraded: + * return messages, True, data[nb:] # <<<<<<<<<<<<<< + * else: + * return messages, False, b'' + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, __pyx_v_nb, 0, NULL, 
NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 532, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 532, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_messages); + __Pyx_GIVEREF(__pyx_v_messages); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":531 + * messages = () + * + * if self._upgraded: # <<<<<<<<<<<<<< + * return messages, True, data[nb:] + * else: + */ + } + + /* "aiohttp/_http_parser.pyx":534 + * return messages, True, data[nb:] + * else: + * return messages, False, b'' # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 534, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_messages); + __Pyx_GIVEREF(__pyx_v_messages); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False); + __Pyx_INCREF(__pyx_kp_b__4); + __Pyx_GIVEREF(__pyx_kp_b__4); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_b__4); + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":493 + * return self._messages[-1][0] + * + * def feed_data(self, data): # <<<<<<<<<<<<<< + * cdef: + * size_t data_len + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_data", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_XDECREF(__pyx_v_messages); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_8__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_8__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":539 + * cdef class HttpRequestParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); 
/*proto*/ +static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_protocol = 0; + PyObject *__pyx_v_loop = 0; + PyObject *__pyx_v_timer = 0; + size_t __pyx_v_max_line_size; + size_t __pyx_v_max_headers; + size_t __pyx_v_max_field_size; + PyObject *__pyx_v_payload_exception = 0; + int __pyx_v_response_with_body; + CYTHON_UNUSED int __pyx_v_read_until_eof; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,0}; + PyObject* values[9] = {0,0,0,0,0,0,0,0,0}; + values[2] = ((PyObject *)Py_None); + + /* "aiohttp/_http_parser.pyx":541 + * def __init__(self, protocol, loop, timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< + * bint response_with_body=True, bint read_until_eof=False): + * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, + */ + values[6] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 9, 1); __PYX_ERR(0, 539, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_timer); + if (value) { values[2] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 3: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_line_size); + if (value) { values[3] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 4: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_headers); + if (value) { values[4] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 5: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_field_size); + if (value) { values[5] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 6: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_payload_exception); + if (value) { values[6] = value; kw_args--; } + } + 
CYTHON_FALLTHROUGH; + case 7: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_response_with_body); + if (value) { values[7] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 8: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_read_until_eof); + if (value) { values[8] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 539, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_protocol = values[0]; + __pyx_v_loop = values[1]; + __pyx_v_timer = values[2]; + if (values[3]) { + __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[3]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 540, __pyx_L3_error) + } else { + __pyx_v_max_line_size = ((size_t)0x1FFE); + } + if (values[4]) { + __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 540, __pyx_L3_error) + } else { + __pyx_v_max_headers = ((size_t)0x8000); + } + if (values[5]) { + __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 541, __pyx_L3_error) + } else { + __pyx_v_max_field_size = ((size_t)0x1FFE); + } + __pyx_v_payload_exception = values[6]; + if (values[7]) { + __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[7]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 542, __pyx_L3_error) + } else { + + /* "aiohttp/_http_parser.pyx":542 + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint read_until_eof=False): # <<<<<<<<<<<<<< + * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, + * max_line_size, max_headers, max_field_size, + */ + __pyx_v_response_with_body = ((int)1); + } + if (values[8]) { + __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 542, __pyx_L3_error) + } else { + __pyx_v_read_until_eof = ((int)0); + } + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 539, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser 
*)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof); + + /* "aiohttp/_http_parser.pyx":539 + * cdef class HttpRequestParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, int __pyx_v_response_with_body, CYTHON_UNUSED int __pyx_v_read_until_eof) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "aiohttp/_http_parser.pyx":543 + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint read_until_eof=False): + * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, # <<<<<<<<<<<<<< + * max_line_size, max_headers, max_field_size, + * payload_exception, response_with_body) + */ + __pyx_t_2.__pyx_n = 6; + __pyx_t_2.timer = __pyx_v_timer; + __pyx_t_2.max_line_size = __pyx_v_max_line_size; + __pyx_t_2.max_headers = __pyx_v_max_headers; + __pyx_t_2.max_field_size = __pyx_v_max_field_size; + __pyx_t_2.payload_exception = __pyx_v_payload_exception; + __pyx_t_2.response_with_body = __pyx_v_response_with_body; + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_REQUEST, __pyx_v_protocol, __pyx_v_loop, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 543, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":539 + * cdef class HttpRequestParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":547 + * payload_exception, response_with_body) + * + * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< + * cdef Py_buffer py_buf + * if not self._buf: + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self) { + Py_buffer __pyx_v_py_buf; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + char const *__pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = 
NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + __Pyx_RefNannySetupContext("_on_status_complete", 0); + + /* "aiohttp/_http_parser.pyx":549 + * cdef object _on_status_complete(self): + * cdef Py_buffer py_buf + * if not self._buf: # <<<<<<<<<<<<<< + * return + * self._path = self._buf.decode('utf-8', 'surrogateescape') + */ + __pyx_t_1 = (__pyx_v_self->__pyx_base._buf != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->__pyx_base._buf) != 0); + __pyx_t_2 = ((!__pyx_t_1) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":550 + * cdef Py_buffer py_buf + * if not self._buf: + * return # <<<<<<<<<<<<<< + * self._path = self._buf.decode('utf-8', 'surrogateescape') + * if self._cparser.method == 5: # CONNECT + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":549 + * cdef object _on_status_complete(self): + * cdef Py_buffer py_buf + * if not self._buf: # <<<<<<<<<<<<<< + * return + * self._path = self._buf.decode('utf-8', 'surrogateescape') + */ + } + + /* "aiohttp/_http_parser.pyx":551 + * if not self._buf: + * return + * self._path = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * if self._cparser.method == 5: # CONNECT + * self._url = URL(self._path) + */ + if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); + __PYX_ERR(0, 551, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 551, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._path); + __Pyx_DECREF(__pyx_v_self->__pyx_base._path); + __pyx_v_self->__pyx_base._path = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":552 + * return + * self._path = self._buf.decode('utf-8', 'surrogateescape') + * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< + * self._url = URL(self._path) + * else: + */ + __pyx_t_2 = ((__pyx_v_self->__pyx_base._cparser->method == 5) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_parser.pyx":553 + * self._path = self._buf.decode('utf-8', 'surrogateescape') + * if self._cparser.method == 5: # CONNECT + * self._url = URL(self._path) # <<<<<<<<<<<<<< + * else: + * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) + */ + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_URL); + __pyx_t_4 = __pyx_v_7aiohttp_12_http_parser_URL; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_v_self->__pyx_base._path) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_self->__pyx_base._path); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 553, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); + __Pyx_DECREF(__pyx_v_self->__pyx_base._url); + __pyx_v_self->__pyx_base._url = __pyx_t_3; + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":552 + * return + * self._path = self._buf.decode('utf-8', 'surrogateescape') + * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< + * self._url = URL(self._path) + * else: + */ + goto __pyx_L4; + } + + /* "aiohttp/_http_parser.pyx":555 + * self._url = URL(self._path) + * else: + * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< + * try: + * self._url = _parse_url(py_buf.buf, + */ + /*else*/ { + __pyx_t_3 = __pyx_v_self->__pyx_base._buf; + __Pyx_INCREF(__pyx_t_3); + __pyx_t_6 = PyObject_GetBuffer(__pyx_t_3, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 555, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":556 + * else: + * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) + * try: # <<<<<<<<<<<<<< + * self._url = _parse_url(py_buf.buf, + * py_buf.len) + */ + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":557 + * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) + * try: + * self._url = _parse_url(py_buf.buf, # <<<<<<<<<<<<<< + * py_buf.len) + * finally: + */ + __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser__parse_url(((char *)__pyx_v_py_buf.buf), __pyx_v_py_buf.len); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 557, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); + __Pyx_DECREF(__pyx_v_self->__pyx_base._url); + __pyx_v_self->__pyx_base._url = __pyx_t_3; + __pyx_t_3 = 0; + } + + /* "aiohttp/_http_parser.pyx":560 + * py_buf.len) + * finally: + * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< + * PyByteArray_Resize(self._buf, 0) + * + */ + /*finally:*/ { + /*normal exit:*/{ + PyBuffer_Release((&__pyx_v_py_buf)); + goto __pyx_L7; + } + __pyx_L6_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0)) __Pyx_ErrFetch(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __pyx_t_6 = __pyx_lineno; __pyx_t_7 = __pyx_clineno; __pyx_t_8 = __pyx_filename; + { + PyBuffer_Release((&__pyx_v_py_buf)); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_13, __pyx_t_14); + } + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ErrRestore(__pyx_t_9, __pyx_t_10, __pyx_t_11); + __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 
0; __pyx_t_14 = 0; + __pyx_lineno = __pyx_t_6; __pyx_clineno = __pyx_t_7; __pyx_filename = __pyx_t_8; + goto __pyx_L1_error; + } + __pyx_L7:; + } + } + __pyx_L4:; + + /* "aiohttp/_http_parser.pyx":561 + * finally: + * PyBuffer_Release(&py_buf) + * PyByteArray_Resize(self._buf, 0) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_3 = __pyx_v_self->__pyx_base._buf; + __Pyx_INCREF(__pyx_t_3); + __pyx_t_7 = PyByteArray_Resize(__pyx_t_3, 0); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 561, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":547 + * payload_exception, response_with_body) + * + * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< + * cdef Py_buffer py_buf + * if not self._buf: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default 
__reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":566 + * cdef class HttpResponseParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + +/* Python wrapper */ +static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_protocol = 0; + PyObject *__pyx_v_loop = 0; + PyObject *__pyx_v_timer = 0; + size_t __pyx_v_max_line_size; + size_t __pyx_v_max_headers; + size_t __pyx_v_max_field_size; + PyObject *__pyx_v_payload_exception = 0; + int __pyx_v_response_with_body; + CYTHON_UNUSED int __pyx_v_read_until_eof; + int __pyx_v_auto_decompress; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,&__pyx_n_s_auto_decompress,0}; + PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; + values[2] = ((PyObject *)Py_None); + + /* "aiohttp/_http_parser.pyx":568 + * def __init__(self, protocol, loop, timer=None, + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< + * bint response_with_body=True, bint read_until_eof=False, + * bint auto_decompress=True): + */ + values[6] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 10, 1); __PYX_ERR(0, 566, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_timer); + if (value) { values[2] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 3: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_line_size); + if (value) { values[3] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 4: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_headers); + if (value) { values[4] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 5: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_field_size); + if (value) { values[5] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 6: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_payload_exception); + if (value) { values[6] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 7: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_response_with_body); + if (value) { values[7] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 8: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_read_until_eof); + if (value) { values[8] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 9: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_auto_decompress); + if (value) { values[9] = 
value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 566, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_protocol = values[0]; + __pyx_v_loop = values[1]; + __pyx_v_timer = values[2]; + if (values[3]) { + __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[3]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 567, __pyx_L3_error) + } else { + __pyx_v_max_line_size = ((size_t)0x1FFE); + } + if (values[4]) { + __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 567, __pyx_L3_error) + } else { + __pyx_v_max_headers = ((size_t)0x8000); + } + if (values[5]) { + __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 568, __pyx_L3_error) + } else { + __pyx_v_max_field_size = ((size_t)0x1FFE); + } + __pyx_v_payload_exception = values[6]; + if (values[7]) { + __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[7]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 569, __pyx_L3_error) + } else { + + /* "aiohttp/_http_parser.pyx":569 + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< + * bint auto_decompress=True): + * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, + */ + __pyx_v_response_with_body = ((int)1); + } + if (values[8]) { + __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 569, __pyx_L3_error) + } else { + __pyx_v_read_until_eof = ((int)0); + } + if (values[9]) { + __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 570, __pyx_L3_error) + } else { + + /* "aiohttp/_http_parser.pyx":570 + * size_t max_field_size=8190, payload_exception=None, + * bint response_with_body=True, bint read_until_eof=False, + * bint auto_decompress=True): # <<<<<<<<<<<<<< + * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, + * max_line_size, max_headers, max_field_size, + */ + __pyx_v_auto_decompress = ((int)1); + } + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 566, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__init__", 
__pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof, __pyx_v_auto_decompress); + + /* "aiohttp/_http_parser.pyx":566 + * cdef class HttpResponseParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, int __pyx_v_response_with_body, CYTHON_UNUSED int __pyx_v_read_until_eof, int __pyx_v_auto_decompress) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "aiohttp/_http_parser.pyx":571 + * bint response_with_body=True, bint read_until_eof=False, + * bint auto_decompress=True): + * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, # <<<<<<<<<<<<<< + * max_line_size, max_headers, max_field_size, + * payload_exception, response_with_body, auto_decompress) + */ + __pyx_t_2.__pyx_n = 7; + __pyx_t_2.timer = __pyx_v_timer; + __pyx_t_2.max_line_size = __pyx_v_max_line_size; + __pyx_t_2.max_headers = __pyx_v_max_headers; + __pyx_t_2.max_field_size = __pyx_v_max_field_size; + __pyx_t_2.payload_exception = __pyx_v_payload_exception; + __pyx_t_2.response_with_body = __pyx_v_response_with_body; + __pyx_t_2.auto_decompress = __pyx_v_auto_decompress; + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_RESPONSE, __pyx_v_protocol, __pyx_v_loop, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 571, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":566 + * cdef class HttpResponseParser(HttpParser): + * + * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< + * size_t max_line_size=8190, size_t max_headers=32768, + * size_t max_field_size=8190, payload_exception=None, + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":575 + * payload_exception, response_with_body, auto_decompress) + * + * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< + * if self._buf: + * self._reason = self._buf.decode('utf-8', 'surrogateescape') + */ + +static PyObject 
*__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + __Pyx_RefNannySetupContext("_on_status_complete", 0); + + /* "aiohttp/_http_parser.pyx":576 + * + * cdef object _on_status_complete(self): + * if self._buf: # <<<<<<<<<<<<<< + * self._reason = self._buf.decode('utf-8', 'surrogateescape') + * PyByteArray_Resize(self._buf, 0) + */ + __pyx_t_1 = (__pyx_v_self->__pyx_base._buf != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->__pyx_base._buf) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":577 + * cdef object _on_status_complete(self): + * if self._buf: + * self._reason = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * PyByteArray_Resize(self._buf, 0) + * + */ + if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); + __PYX_ERR(0, 577, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 577, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._reason); + __Pyx_DECREF(__pyx_v_self->__pyx_base._reason); + __pyx_v_self->__pyx_base._reason = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":578 + * if self._buf: + * self._reason = self._buf.decode('utf-8', 'surrogateescape') + * PyByteArray_Resize(self._buf, 0) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = __pyx_v_self->__pyx_base._buf; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_3 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 578, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_parser.pyx":576 + * + * cdef object _on_status_complete(self): + * if self._buf: # <<<<<<<<<<<<<< + * self._reason = self._buf.decode('utf-8', 'surrogateescape') + * PyByteArray_Resize(self._buf, 0) + */ + } + + /* "aiohttp/_http_parser.pyx":575 + * payload_exception, response_with_body, auto_decompress) + * + * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< + * if self._buf: + * self._reason = self._buf.decode('utf-8', 'surrogateescape') + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree 
fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":581 + * + * + * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct http_parser *__pyx_v_parser) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + __Pyx_RefNannySetupContext("cb_on_message_begin", 0); + + /* "aiohttp/_http_parser.pyx":582 + * + * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * + * pyparser._started = True + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":584 + * cdef HttpParser pyparser = parser.data + * + * pyparser._started = True # <<<<<<<<<<<<<< + * pyparser._headers = CIMultiDict() + * pyparser._raw_headers = [] + */ + __pyx_v_pyparser->_started = 1; + + /* "aiohttp/_http_parser.pyx":585 + * + * pyparser._started = True + * pyparser._headers = CIMultiDict() # <<<<<<<<<<<<<< + * pyparser._raw_headers = [] + * PyByteArray_Resize(pyparser._buf, 0) + */ + __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDict); + __pyx_t_2 = __pyx_v_7aiohttp_12_http_parser_CIMultiDict; __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 585, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_pyparser->_headers); + __Pyx_DECREF(__pyx_v_pyparser->_headers); + __pyx_v_pyparser->_headers = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":586 + * pyparser._started = True + * pyparser._headers = CIMultiDict() + * pyparser._raw_headers = [] # <<<<<<<<<<<<<< + * PyByteArray_Resize(pyparser._buf, 0) + * pyparser._path = None + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 586, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_pyparser->_raw_headers); + __Pyx_DECREF(__pyx_v_pyparser->_raw_headers); + __pyx_v_pyparser->_raw_headers = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":587 + * pyparser._headers = CIMultiDict() + * pyparser._raw_headers = [] + * PyByteArray_Resize(pyparser._buf, 0) # <<<<<<<<<<<<<< + * pyparser._path = None + * pyparser._reason = None + */ + __pyx_t_1 = __pyx_v_pyparser->_buf; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_4 = PyByteArray_Resize(__pyx_t_1, 0); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":588 + * pyparser._raw_headers = [] + * PyByteArray_Resize(pyparser._buf, 0) + * pyparser._path = None # <<<<<<<<<<<<<< + * pyparser._reason = None + * return 0 + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_pyparser->_path); + __Pyx_DECREF(__pyx_v_pyparser->_path); + __pyx_v_pyparser->_path = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":589 + * PyByteArray_Resize(pyparser._buf, 0) + * pyparser._path = None + * pyparser._reason = None # <<<<<<<<<<<<<< + * return 0 + * + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_pyparser->_reason); + __Pyx_DECREF(__pyx_v_pyparser->_reason); + __pyx_v_pyparser->_reason = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":590 + * pyparser._path = None + * pyparser._reason = None + * return 0 # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":581 + * + * + * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":593 + * + * + * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject 
*__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + __Pyx_RefNannySetupContext("cb_on_url", 0); + + /* "aiohttp/_http_parser.pyx":595 + * cdef int cb_on_url(cparser.http_parser* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * if length > pyparser._max_line_size: + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":596 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":597 + * cdef HttpParser pyparser = parser.data + * try: + * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + */ + __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); + if (unlikely(__pyx_t_5)) { + + /* "aiohttp/_http_parser.pyx":598 + * try: + * if length > pyparser._max_line_size: + * raise LineTooLong( # <<<<<<<<<<<<<< + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 598, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + + /* "aiohttp/_http_parser.pyx":599 + * if length > pyparser._max_line_size: + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) # <<<<<<<<<<<<<< + * extend(pyparser._buf, at, length) + * except BaseException as ex: + */ + __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 599, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 599, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 598, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 598, __pyx_L3_error) + 
__Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_11 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 598, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_11); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); + __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); + PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_kp_u_Status_line_is_too_long); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_10, __pyx_t_8); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 598, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 598, __pyx_L3_error) + + /* "aiohttp/_http_parser.pyx":597 + * cdef HttpParser pyparser = parser.data + * try: + * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + */ + } + + /* "aiohttp/_http_parser.pyx":600 + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = __pyx_v_pyparser->_buf; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_6 = __pyx_f_7aiohttp_12_http_parser_extend(__pyx_t_1, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 600, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":596 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + } + + /* "aiohttp/_http_parser.pyx":605 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":601 + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_10) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_1, &__pyx_t_11) < 0) __PYX_ERR(0, 601, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_ex = __pyx_t_1; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":602 + * extend(pyparser._buf, at, length) + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + 
__Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "aiohttp/_http_parser.pyx":603 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L14_return; + } + + /* "aiohttp/_http_parser.pyx":601 + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L14_return: { + __pyx_t_10 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_10; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":596 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":593 + * + * + * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":608 + * + * + * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + __Pyx_RefNannySetupContext("cb_on_status", 0); + + /* "aiohttp/_http_parser.pyx":610 + * cdef int cb_on_status(cparser.http_parser* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * cdef str reason + * try: + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* 
"aiohttp/_http_parser.pyx":612 + * cdef HttpParser pyparser = parser.data + * cdef str reason + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":613 + * cdef str reason + * try: + * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + */ + __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); + if (unlikely(__pyx_t_5)) { + + /* "aiohttp/_http_parser.pyx":614 + * try: + * if length > pyparser._max_line_size: + * raise LineTooLong( # <<<<<<<<<<<<<< + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 614, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + + /* "aiohttp/_http_parser.pyx":615 + * if length > pyparser._max_line_size: + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) # <<<<<<<<<<<<<< + * extend(pyparser._buf, at, length) + * except BaseException as ex: + */ + __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 615, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 615, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 614, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 614, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_11 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 614, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_11); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); + __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); + PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_kp_u_Status_line_is_too_long); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, __pyx_t_7); + 
__Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_10, __pyx_t_8); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 614, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 614, __pyx_L3_error) + + /* "aiohttp/_http_parser.pyx":613 + * cdef str reason + * try: + * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + */ + } + + /* "aiohttp/_http_parser.pyx":616 + * raise LineTooLong( + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = __pyx_v_pyparser->_buf; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_6 = __pyx_f_7aiohttp_12_http_parser_extend(__pyx_t_1, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 616, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":612 + * cdef HttpParser pyparser = parser.data + * cdef str reason + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + } + + /* "aiohttp/_http_parser.pyx":621 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":617 + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_10) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_1, &__pyx_t_11) < 0) __PYX_ERR(0, 617, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_ex = __pyx_t_1; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":618 + * extend(pyparser._buf, at, length) + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "aiohttp/_http_parser.pyx":619 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L14_return; + } + + /* "aiohttp/_http_parser.pyx":617 + * 'Status line is too long', pyparser._max_line_size, length) + * extend(pyparser._buf, at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 
+ */ + /*finally:*/ { + __pyx_L14_return: { + __pyx_t_10 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_10; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":612 + * cdef HttpParser pyparser = parser.data + * cdef str reason + * try: # <<<<<<<<<<<<<< + * if length > pyparser._max_line_size: + * raise LineTooLong( + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":608 + * + * + * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":624 + * + * + * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + Py_ssize_t __pyx_v_size; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + PyObject *__pyx_t_12 = NULL; + __Pyx_RefNannySetupContext("cb_on_header_field", 0); + + /* "aiohttp/_http_parser.pyx":626 + * cdef int cb_on_header_field(cparser.http_parser* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * try: + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":628 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":629 + * cdef Py_ssize_t size + * try: + * pyparser._on_status_complete() # <<<<<<<<<<<<<< + * size = len(pyparser._raw_name) + length + * if size > pyparser._max_field_size: + */ + __pyx_t_1 = ((struct 
__pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 629, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":630 + * try: + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length # <<<<<<<<<<<<<< + * if size > pyparser._max_field_size: + * raise LineTooLong( + */ + __pyx_t_1 = __pyx_v_pyparser->_raw_name; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 630, __pyx_L3_error) + } + __pyx_t_5 = PyByteArray_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 630, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_size = (__pyx_t_5 + __pyx_v_length); + + /* "aiohttp/_http_parser.pyx":631 + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length + * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Header name is too long', pyparser._max_field_size, size) + */ + __pyx_t_6 = ((__pyx_v_size > __pyx_v_pyparser->_max_field_size) != 0); + if (unlikely(__pyx_t_6)) { + + /* "aiohttp/_http_parser.pyx":632 + * size = len(pyparser._raw_name) + length + * if size > pyparser._max_field_size: + * raise LineTooLong( # <<<<<<<<<<<<<< + * 'Header name is too long', pyparser._max_field_size, size) + * pyparser._on_header_field(at, length) + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 632, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "aiohttp/_http_parser.pyx":633 + * if size > pyparser._max_field_size: + * raise LineTooLong( + * 'Header name is too long', pyparser._max_field_size, size) # <<<<<<<<<<<<<< + * pyparser._on_header_field(at, length) + * except BaseException as ex: + */ + __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 633, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyInt_FromSsize_t(__pyx_v_size); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 633, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_name_is_too_long, __pyx_t_8, __pyx_t_9}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 632, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_name_is_too_long, __pyx_t_8, __pyx_t_9}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 632, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + 
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + { + __pyx_t_12 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 632, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_12); + if (__pyx_t_10) { + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; + } + __Pyx_INCREF(__pyx_kp_u_Header_name_is_too_long); + __Pyx_GIVEREF(__pyx_kp_u_Header_name_is_too_long); + PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_kp_u_Header_name_is_too_long); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_12, 2+__pyx_t_11, __pyx_t_9); + __pyx_t_8 = 0; + __pyx_t_9 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 632, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 632, __pyx_L3_error) + + /* "aiohttp/_http_parser.pyx":631 + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length + * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Header name is too long', pyparser._max_field_size, size) + */ + } + + /* "aiohttp/_http_parser.pyx":634 + * raise LineTooLong( + * 'Header name is too long', pyparser._max_field_size, size) + * pyparser._on_header_field(at, length) # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_field(__pyx_v_pyparser, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 634, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":628 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length + */ + } + + /* "aiohttp/_http_parser.pyx":639 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":635 + * 'Header name is too long', pyparser._max_field_size, size) + * pyparser._on_header_field(at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_11) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_12) < 0) __PYX_ERR(0, 635, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_12); + __Pyx_INCREF(__pyx_t_7); + __pyx_v_ex = __pyx_t_7; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":636 + * pyparser._on_header_field(at, length) + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + 
__Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "aiohttp/_http_parser.pyx":637 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L14_return; + } + + /* "aiohttp/_http_parser.pyx":635 + * 'Header name is too long', pyparser._max_field_size, size) + * pyparser._on_header_field(at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L14_return: { + __pyx_t_11 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_11; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":628 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * size = len(pyparser._raw_name) + length + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":624 + * + * + * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":642 + * + * + * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + Py_ssize_t __pyx_v_size; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + PyObject *__pyx_t_12 = NULL; + __Pyx_RefNannySetupContext("cb_on_header_value", 0); + + /* "aiohttp/_http_parser.pyx":644 + * cdef int cb_on_header_value(cparser.http_parser* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * cdef Py_ssize_t size + * try: + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* 
"aiohttp/_http_parser.pyx":646 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":647 + * cdef Py_ssize_t size + * try: + * size = len(pyparser._raw_value) + length # <<<<<<<<<<<<<< + * if size > pyparser._max_field_size: + * raise LineTooLong( + */ + __pyx_t_1 = __pyx_v_pyparser->_raw_value; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 647, __pyx_L3_error) + } + __pyx_t_5 = PyByteArray_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 647, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_size = (__pyx_t_5 + __pyx_v_length); + + /* "aiohttp/_http_parser.pyx":648 + * try: + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Header value is too long', pyparser._max_field_size, size) + */ + __pyx_t_6 = ((__pyx_v_size > __pyx_v_pyparser->_max_field_size) != 0); + if (unlikely(__pyx_t_6)) { + + /* "aiohttp/_http_parser.pyx":649 + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: + * raise LineTooLong( # <<<<<<<<<<<<<< + * 'Header value is too long', pyparser._max_field_size, size) + * pyparser._on_header_value(at, length) + */ + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 649, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "aiohttp/_http_parser.pyx":650 + * if size > pyparser._max_field_size: + * raise LineTooLong( + * 'Header value is too long', pyparser._max_field_size, size) # <<<<<<<<<<<<<< + * pyparser._on_header_value(at, length) + * except BaseException as ex: + */ + __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 650, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyInt_FromSsize_t(__pyx_v_size); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 650, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_8, __pyx_t_9}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 649, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_8, __pyx_t_9}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 649, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + { + __pyx_t_12 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 649, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_12); + if (__pyx_t_10) { + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; + } + __Pyx_INCREF(__pyx_kp_u_Header_value_is_too_long); + __Pyx_GIVEREF(__pyx_kp_u_Header_value_is_too_long); + PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_kp_u_Header_value_is_too_long); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_12, 2+__pyx_t_11, __pyx_t_9); + __pyx_t_8 = 0; + __pyx_t_9 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 649, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 649, __pyx_L3_error) + + /* "aiohttp/_http_parser.pyx":648 + * try: + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< + * raise LineTooLong( + * 'Header value is too long', pyparser._max_field_size, size) + */ + } + + /* "aiohttp/_http_parser.pyx":651 + * raise LineTooLong( + * 'Header value is too long', pyparser._max_field_size, size) + * pyparser._on_header_value(at, length) # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_value(__pyx_v_pyparser, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 651, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":646 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: + */ + } + + /* "aiohttp/_http_parser.pyx":656 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":652 + * 'Header value is too long', pyparser._max_field_size, size) + * pyparser._on_header_value(at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_11) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_12) < 0) __PYX_ERR(0, 652, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_12); + __Pyx_INCREF(__pyx_t_7); + __pyx_v_ex = __pyx_t_7; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":653 + * pyparser._on_header_value(at, length) + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return 
-1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "aiohttp/_http_parser.pyx":654 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L14_return; + } + + /* "aiohttp/_http_parser.pyx":652 + * 'Header value is too long', pyparser._max_field_size, size) + * pyparser._on_header_value(at, length) + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L14_return: { + __pyx_t_11 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_11; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":646 + * cdef HttpParser pyparser = parser.data + * cdef Py_ssize_t size + * try: # <<<<<<<<<<<<<< + * size = len(pyparser._raw_value) + length + * if size > pyparser._max_field_size: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":642 + * + * + * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":659 + * + * + * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *__pyx_v_parser) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_exc = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + __Pyx_RefNannySetupContext("cb_on_headers_complete", 0); + + /* "aiohttp/_http_parser.pyx":660 + * + * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_status_complete() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":661 + * cdef int 
cb_on_headers_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * pyparser._on_headers_complete() + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":662 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_status_complete() # <<<<<<<<<<<<<< + * pyparser._on_headers_complete() + * except BaseException as exc: + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":663 + * try: + * pyparser._on_status_complete() + * pyparser._on_headers_complete() # <<<<<<<<<<<<<< + * except BaseException as exc: + * pyparser._last_error = exc + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_headers_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 663, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":661 + * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * pyparser._on_headers_complete() + */ + } + + /* "aiohttp/_http_parser.pyx":668 + * return -1 + * else: + * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< + * return 2 + * else: + */ + /*else:*/ { + __pyx_t_6 = (__pyx_v_pyparser->_cparser->upgrade != 0); + if (!__pyx_t_6) { + } else { + __pyx_t_5 = __pyx_t_6; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_6 = ((__pyx_v_pyparser->_cparser->method == 5) != 0); + __pyx_t_5 = __pyx_t_6; + __pyx_L10_bool_binop_done:; + if (__pyx_t_5) { + + /* "aiohttp/_http_parser.pyx":669 + * else: + * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT + * return 2 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = 2; + goto __pyx_L6_except_return; + + /* "aiohttp/_http_parser.pyx":668 + * return -1 + * else: + * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< + * return 2 + * else: + */ + } + + /* "aiohttp/_http_parser.pyx":671 + * return 2 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":664 + * pyparser._on_status_complete() + * pyparser._on_headers_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_7) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_8, &__pyx_t_9) < 0) __PYX_ERR(0, 664, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __pyx_v_exc = __pyx_t_8; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":665 + * pyparser._on_headers_complete() + * 
except BaseException as exc: + * pyparser._last_error = exc # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_exc); + __Pyx_GIVEREF(__pyx_v_exc); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_exc; + + /* "aiohttp/_http_parser.pyx":666 + * except BaseException as exc: + * pyparser._last_error = exc + * return -1 # <<<<<<<<<<<<<< + * else: + * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L16_return; + } + + /* "aiohttp/_http_parser.pyx":664 + * pyparser._on_status_complete() + * pyparser._on_headers_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + /*finally:*/ { + __pyx_L16_return: { + __pyx_t_7 = __pyx_r; + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + __pyx_r = __pyx_t_7; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":661 + * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_status_complete() + * pyparser._on_headers_complete() + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":659 + * + * + * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_exc); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":674 + * + * + * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_body = 0; + PyObject *__pyx_v_exc = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + int __pyx_t_16; + char const *__pyx_t_17; + PyObject *__pyx_t_18 = NULL; + PyObject *__pyx_t_19 = NULL; + PyObject *__pyx_t_20 = NULL; + PyObject *__pyx_t_21 = NULL; + PyObject *__pyx_t_22 = NULL; + PyObject *__pyx_t_23 = NULL; + 
__Pyx_RefNannySetupContext("cb_on_body", 0); + + /* "aiohttp/_http_parser.pyx":676 + * cdef int cb_on_body(cparser.http_parser* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * cdef bytes body = at[:length] + * try: + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":677 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * cdef bytes body = at[:length] # <<<<<<<<<<<<<< + * try: + * pyparser._payload.feed_data(body, length) + */ + __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 677, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_body = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":678 + * cdef HttpParser pyparser = parser.data + * cdef bytes body = at[:length] + * try: # <<<<<<<<<<<<<< + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":679 + * cdef bytes body = at[:length] + * try: + * pyparser._payload.feed_data(body, length) # <<<<<<<<<<<<<< + * except BaseException as exc: + * if pyparser._payload_exception is not None: + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_feed_data); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_body); + __Pyx_GIVEREF(__pyx_v_body); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_body); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_6); 
+ __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":678 + * cdef HttpParser pyparser = parser.data + * cdef bytes body = at[:length] + * try: # <<<<<<<<<<<<<< + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: + */ + } + + /* "aiohttp/_http_parser.pyx":688 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":680 + * try: + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: # <<<<<<<<<<<<<< + * if pyparser._payload_exception is not None: + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_8) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_9) < 0) __PYX_ERR(0, 680, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_t_5); + __pyx_v_exc = __pyx_t_5; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":681 + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: + * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + * else: + */ + __pyx_t_10 = (__pyx_v_pyparser->_payload_exception != Py_None); + __pyx_t_11 = (__pyx_t_10 != 0); + if (__pyx_t_11) { + + /* "aiohttp/_http_parser.pyx":682 + * except BaseException as exc: + * if pyparser._payload_exception is not None: + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) # <<<<<<<<<<<<<< + * else: + * pyparser._payload.set_exception(exc) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 682, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_13 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_v_exc); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 682, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_INCREF(__pyx_v_pyparser->_payload_exception); + __pyx_t_14 = __pyx_v_pyparser->_payload_exception; __pyx_t_15 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_15)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_15); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + __pyx_t_12 = (__pyx_t_15) ? 
__Pyx_PyObject_Call2Args(__pyx_t_14, __pyx_t_15, __pyx_t_13) : __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_t_13); + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 682, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_6 = (__pyx_t_14) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_14, __pyx_t_12) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_12); + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 682, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":681 + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: + * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + * else: + */ + goto __pyx_L16; + } + + /* "aiohttp/_http_parser.pyx":684 + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + * else: + * pyparser._payload.set_exception(exc) # <<<<<<<<<<<<<< + * pyparser._payload_error = 1 + * return -1 + */ + /*else*/ { + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 684, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_12 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_12 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_12)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_12); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_6 = (__pyx_t_12) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_12, __pyx_v_exc) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_exc); + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 684, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __pyx_L16:; + + /* "aiohttp/_http_parser.pyx":685 + * else: + * pyparser._payload.set_exception(exc) + * pyparser._payload_error = 1 # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __pyx_v_pyparser->_payload_error = 1; + + /* "aiohttp/_http_parser.pyx":686 + * pyparser._payload.set_exception(exc) + * pyparser._payload_error = 1 + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L13_return; + } + + /* "aiohttp/_http_parser.pyx":680 + * try: + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: # <<<<<<<<<<<<<< + * if pyparser._payload_exception is not None: + * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + */ + /*finally:*/ { + __pyx_L14_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20) < 0)) __Pyx_ErrFetch(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20); + __Pyx_XGOTREF(__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_19); + __Pyx_XGOTREF(__pyx_t_20); + __Pyx_XGOTREF(__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_23); + __pyx_t_8 = __pyx_lineno; __pyx_t_16 = __pyx_clineno; __pyx_t_17 = __pyx_filename; + { + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_21); + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_ExceptionReset(__pyx_t_21, __pyx_t_22, __pyx_t_23); + } + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_19); + __Pyx_XGIVEREF(__pyx_t_20); + __Pyx_ErrRestore(__pyx_t_18, __pyx_t_19, __pyx_t_20); + __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; + __pyx_lineno = __pyx_t_8; __pyx_clineno = __pyx_t_16; __pyx_filename = __pyx_t_17; + goto __pyx_L5_except_error; + } + __pyx_L13_return: { + __pyx_t_16 = __pyx_r; + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + __pyx_r = __pyx_t_16; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":678 + * cdef HttpParser pyparser = parser.data + * cdef bytes body = at[:length] + * try: # <<<<<<<<<<<<<< + * pyparser._payload.feed_data(body, length) + * except BaseException as exc: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + 
goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":674 + * + * + * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_body); + __Pyx_XDECREF(__pyx_v_exc); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":691 + * + * + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *__pyx_v_parser) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_exc = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + __Pyx_RefNannySetupContext("cb_on_message_complete", 0); + + /* "aiohttp/_http_parser.pyx":692 + * + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._started = False + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":693 + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._started = False + * pyparser._on_message_complete() + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":694 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._started = False # <<<<<<<<<<<<<< + * pyparser._on_message_complete() + * except BaseException as exc: + */ + __pyx_v_pyparser->_started = 0; + + /* "aiohttp/_http_parser.pyx":695 + * try: + * pyparser._started = False + * pyparser._on_message_complete() # <<<<<<<<<<<<<< + * except BaseException as exc: + * pyparser._last_error = exc + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_message_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 695, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":693 + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._started = False + * pyparser._on_message_complete() + */ + } + + /* "aiohttp/_http_parser.pyx":700 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto 
__pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":696 + * pyparser._started = False + * pyparser._on_message_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_5) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 696, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_exc = __pyx_t_6; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":697 + * pyparser._on_message_complete() + * except BaseException as exc: + * pyparser._last_error = exc # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_exc); + __Pyx_GIVEREF(__pyx_v_exc); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_exc; + + /* "aiohttp/_http_parser.pyx":698 + * except BaseException as exc: + * pyparser._last_error = exc + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "aiohttp/_http_parser.pyx":696 + * pyparser._started = False + * pyparser._on_message_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_5 = __pyx_r; + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + __pyx_r = __pyx_t_5; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":693 + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._started = False + * pyparser._on_message_complete() + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":691 + * + * + * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_exc); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":703 + * + * + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *__pyx_v_parser) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_exc = NULL; + int __pyx_r; + 
__Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + __Pyx_RefNannySetupContext("cb_on_chunk_header", 0); + + /* "aiohttp/_http_parser.pyx":704 + * + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_chunk_header() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":705 + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as exc: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":706 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_chunk_header() # <<<<<<<<<<<<<< + * except BaseException as exc: + * pyparser._last_error = exc + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_header(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":705 + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as exc: + */ + } + + /* "aiohttp/_http_parser.pyx":711 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":707 + * try: + * pyparser._on_chunk_header() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_5) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 707, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_exc = __pyx_t_6; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":708 + * pyparser._on_chunk_header() + * except BaseException as exc: + * pyparser._last_error = exc # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_exc); + __Pyx_GIVEREF(__pyx_v_exc); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_exc; + + /* "aiohttp/_http_parser.pyx":709 + * except BaseException as exc: + * pyparser._last_error = exc + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "aiohttp/_http_parser.pyx":707 + * try: + * pyparser._on_chunk_header() + * except 
BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_5 = __pyx_r; + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + __pyx_r = __pyx_t_5; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":705 + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as exc: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":703 + * + * + * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_exc); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":714 + * + * + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *__pyx_v_parser) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_exc = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + __Pyx_RefNannySetupContext("cb_on_chunk_complete", 0); + + /* "aiohttp/_http_parser.pyx":715 + * + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_chunk_complete() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":716 + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as exc: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":717 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_chunk_complete() # <<<<<<<<<<<<<< + * except BaseException as exc: + * pyparser._last_error = exc + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 717, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":716 + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as exc: + */ + } + + /* "aiohttp/_http_parser.pyx":722 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":718 + * try: + * pyparser._on_chunk_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_5) { + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 718, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_exc = __pyx_t_6; + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":719 + * pyparser._on_chunk_complete() + * except BaseException as exc: + * pyparser._last_error = exc # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_exc); + __Pyx_GIVEREF(__pyx_v_exc); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_exc; + + /* "aiohttp/_http_parser.pyx":720 + * except BaseException as exc: + * pyparser._last_error = exc + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "aiohttp/_http_parser.pyx":718 + * try: + * pyparser._on_chunk_complete() + * except BaseException as exc: # <<<<<<<<<<<<<< + * pyparser._last_error = exc + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_5 = __pyx_r; + __Pyx_DECREF(__pyx_v_exc); + __pyx_v_exc = NULL; + __pyx_r = __pyx_t_5; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "aiohttp/_http_parser.pyx":716 + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as exc: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "aiohttp/_http_parser.pyx":714 + * + * + * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_exc); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":725 + * + * + 
* cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< + * cdef bytes desc = cparser.http_errno_description(errno) + * + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno __pyx_v_errno) { + PyObject *__pyx_v_desc = 0; + PyObject *__pyx_v_cls = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("parser_error_from_errno", 0); + + /* "aiohttp/_http_parser.pyx":726 + * + * cdef parser_error_from_errno(cparser.http_errno errno): + * cdef bytes desc = cparser.http_errno_description(errno) # <<<<<<<<<<<<<< + * + * if errno in (cparser.HPE_CB_message_begin, + */ + __pyx_t_1 = __Pyx_PyBytes_FromString(http_errno_description(__pyx_v_errno)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 726, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_desc = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":728 + * cdef bytes desc = cparser.http_errno_description(errno) + * + * if errno in (cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< + * cparser.HPE_CB_url, + * cparser.HPE_CB_header_field, + */ + switch (__pyx_v_errno) { + case HPE_CB_message_begin: + case HPE_CB_url: + + /* "aiohttp/_http_parser.pyx":729 + * + * if errno in (cparser.HPE_CB_message_begin, + * cparser.HPE_CB_url, # <<<<<<<<<<<<<< + * cparser.HPE_CB_header_field, + * cparser.HPE_CB_header_value, + */ + case HPE_CB_header_field: + + /* "aiohttp/_http_parser.pyx":730 + * if errno in (cparser.HPE_CB_message_begin, + * cparser.HPE_CB_url, + * cparser.HPE_CB_header_field, # <<<<<<<<<<<<<< + * cparser.HPE_CB_header_value, + * cparser.HPE_CB_headers_complete, + */ + case HPE_CB_header_value: + + /* "aiohttp/_http_parser.pyx":731 + * cparser.HPE_CB_url, + * cparser.HPE_CB_header_field, + * cparser.HPE_CB_header_value, # <<<<<<<<<<<<<< + * cparser.HPE_CB_headers_complete, + * cparser.HPE_CB_body, + */ + case HPE_CB_headers_complete: + + /* "aiohttp/_http_parser.pyx":732 + * cparser.HPE_CB_header_field, + * cparser.HPE_CB_header_value, + * cparser.HPE_CB_headers_complete, # <<<<<<<<<<<<<< + * cparser.HPE_CB_body, + * cparser.HPE_CB_message_complete, + */ + case HPE_CB_body: + + /* "aiohttp/_http_parser.pyx":733 + * cparser.HPE_CB_header_value, + * cparser.HPE_CB_headers_complete, + * cparser.HPE_CB_body, # <<<<<<<<<<<<<< + * cparser.HPE_CB_message_complete, + * cparser.HPE_CB_status, + */ + case HPE_CB_message_complete: + + /* "aiohttp/_http_parser.pyx":734 + * cparser.HPE_CB_headers_complete, + * cparser.HPE_CB_body, + * cparser.HPE_CB_message_complete, # <<<<<<<<<<<<<< + * cparser.HPE_CB_status, + * cparser.HPE_CB_chunk_header, + */ + case HPE_CB_status: + + /* "aiohttp/_http_parser.pyx":735 + * cparser.HPE_CB_body, + * cparser.HPE_CB_message_complete, + * cparser.HPE_CB_status, # <<<<<<<<<<<<<< + * cparser.HPE_CB_chunk_header, + * cparser.HPE_CB_chunk_complete): + */ + case HPE_CB_chunk_header: + + /* "aiohttp/_http_parser.pyx":736 + * cparser.HPE_CB_message_complete, + * cparser.HPE_CB_status, + * cparser.HPE_CB_chunk_header, # <<<<<<<<<<<<<< + * cparser.HPE_CB_chunk_complete): + * cls = BadHttpMessage + */ + case HPE_CB_chunk_complete: + + /* "aiohttp/_http_parser.pyx":738 + * cparser.HPE_CB_chunk_header, + * cparser.HPE_CB_chunk_complete): + * cls = BadHttpMessage # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_STATUS: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadHttpMessage); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 738, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":728 + * cdef bytes desc = cparser.http_errno_description(errno) + * + * if errno in (cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< + * cparser.HPE_CB_url, + * cparser.HPE_CB_header_field, + */ + break; + case HPE_INVALID_STATUS: + + /* "aiohttp/_http_parser.pyx":741 + * + * elif errno == cparser.HPE_INVALID_STATUS: + * cls = BadStatusLine # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_METHOD: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 741, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":740 + * cls = BadHttpMessage + * + * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<< + * cls = BadStatusLine + * + */ + break; + case HPE_INVALID_METHOD: + + /* "aiohttp/_http_parser.pyx":744 + * + * elif errno == cparser.HPE_INVALID_METHOD: + * cls = BadStatusLine # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_URL: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 744, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":743 + * cls = BadStatusLine + * + * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<< + * cls = BadStatusLine + * + */ + break; + case HPE_INVALID_URL: + + /* "aiohttp/_http_parser.pyx":747 + * + * elif errno == cparser.HPE_INVALID_URL: + * cls = InvalidURLError # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 747, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":746 + * cls = BadStatusLine + * + * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< + * cls = InvalidURLError + * + */ + break; + default: + + /* "aiohttp/_http_parser.pyx":750 + * + * else: + * cls = BadHttpMessage # <<<<<<<<<<<<<< + * + * return cls(desc.decode('latin-1')) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 750, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + break; + } + + /* "aiohttp/_http_parser.pyx":752 + * cls = BadHttpMessage + * + * return cls(desc.decode('latin-1')) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 752, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_cls); + __pyx_t_3 = __pyx_v_cls; __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_2) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 752, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_parser.pyx":725 + * + * + * cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< + * cdef bytes desc = cparser.http_errno_description(errno) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("aiohttp._http_parser.parser_error_from_errno", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_desc); + __Pyx_XDECREF(__pyx_v_cls); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":755 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_1parse_url = {"parse_url", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_1parse_url, METH_O, 0}; +static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("parse_url (wrapper)", 0); + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_parse_url(__pyx_self, ((PyObject *)__pyx_v_url)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url) { + Py_buffer __pyx_v_py_buf; + char *__pyx_v_buf_data; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + char const *__pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + __Pyx_RefNannySetupContext("parse_url", 0); + + /* "aiohttp/_http_parser.pyx":760 + * char* buf_data + * + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< + * try: + * buf_data = py_buf.buf + */ + __pyx_t_1 = PyObject_GetBuffer(__pyx_v_url, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 760, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":761 + * + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + * try: # <<<<<<<<<<<<<< + * buf_data = py_buf.buf + * return _parse_url(buf_data, py_buf.len) + */ + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":762 + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + * try: + * buf_data = py_buf.buf # <<<<<<<<<<<<<< + * return _parse_url(buf_data, py_buf.len) + * finally: + */ + __pyx_v_buf_data = ((char *)__pyx_v_py_buf.buf); + + /* "aiohttp/_http_parser.pyx":763 + * try: + * buf_data = py_buf.buf + * return _parse_url(buf_data, py_buf.len) # <<<<<<<<<<<<<< + * finally: + * PyBuffer_Release(&py_buf) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser__parse_url(__pyx_v_buf_data, __pyx_v_py_buf.len); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 763, __pyx_L4_error) + 
__Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L3_return; + } + + /* "aiohttp/_http_parser.pyx":765 + * return _parse_url(buf_data, py_buf.len) + * finally: + * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< + * + * + */ + /*finally:*/ { + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_8, &__pyx_t_9, &__pyx_t_10); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0)) __Pyx_ErrFetch(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_10); + __pyx_t_1 = __pyx_lineno; __pyx_t_3 = __pyx_clineno; __pyx_t_4 = __pyx_filename; + { + PyBuffer_Release((&__pyx_v_py_buf)); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + } + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ErrRestore(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; + __pyx_lineno = __pyx_t_1; __pyx_clineno = __pyx_t_3; __pyx_filename = __pyx_t_4; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_10 = __pyx_r; + __pyx_r = 0; + PyBuffer_Release((&__pyx_v_py_buf)); + __pyx_r = __pyx_t_10; + __pyx_t_10 = 0; + goto __pyx_L0; + } + } + + /* "aiohttp/_http_parser.pyx":755 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("aiohttp._http_parser.parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_parser.pyx":768 + * + * + * cdef _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< + * cdef: + * cparser.http_parser_url* parsed + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser__parse_url(char *__pyx_v_buf_data, size_t __pyx_v_length) { + struct http_parser_url *__pyx_v_parsed; + int __pyx_v_res; + PyObject *__pyx_v_schema = 0; + PyObject *__pyx_v_host = 0; + PyObject *__pyx_v_port = 0; + PyObject *__pyx_v_path = 0; + PyObject *__pyx_v_query = 0; + PyObject *__pyx_v_fragment = 0; + PyObject *__pyx_v_user = 0; + PyObject *__pyx_v_password = 0; + PyObject *__pyx_v_userinfo = 0; + CYTHON_UNUSED PyObject *__pyx_v_result = 0; + int __pyx_v_off; + int __pyx_v_ln; + CYTHON_UNUSED PyObject *__pyx_v_sep = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + uint16_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + char const *__pyx_t_12; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18 = NULL; + __Pyx_RefNannySetupContext("_parse_url", 0); + + /* "aiohttp/_http_parser.pyx":772 + * 
cparser.http_parser_url* parsed + * int res + * str schema = None # <<<<<<<<<<<<<< + * str host = None + * object port = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_schema = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":773 + * int res + * str schema = None + * str host = None # <<<<<<<<<<<<<< + * object port = None + * str path = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_host = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":774 + * str schema = None + * str host = None + * object port = None # <<<<<<<<<<<<<< + * str path = None + * str query = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_port = Py_None; + + /* "aiohttp/_http_parser.pyx":775 + * str host = None + * object port = None + * str path = None # <<<<<<<<<<<<<< + * str query = None + * str fragment = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_path = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":776 + * object port = None + * str path = None + * str query = None # <<<<<<<<<<<<<< + * str fragment = None + * str user = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_query = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":777 + * str path = None + * str query = None + * str fragment = None # <<<<<<<<<<<<<< + * str user = None + * str password = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_fragment = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":778 + * str query = None + * str fragment = None + * str user = None # <<<<<<<<<<<<<< + * str password = None + * str userinfo = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_user = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":779 + * str fragment = None + * str user = None + * str password = None # <<<<<<<<<<<<<< + * str userinfo = None + * object result = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_password = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":780 + * str user = None + * str password = None + * str userinfo = None # <<<<<<<<<<<<<< + * object result = None + * int off + */ + __Pyx_INCREF(Py_None); + __pyx_v_userinfo = ((PyObject*)Py_None); + + /* "aiohttp/_http_parser.pyx":781 + * str password = None + * str userinfo = None + * object result = None # <<<<<<<<<<<<<< + * int off + * int ln + */ + __Pyx_INCREF(Py_None); + __pyx_v_result = Py_None; + + /* "aiohttp/_http_parser.pyx":785 + * int ln + * + * parsed = \ # <<<<<<<<<<<<<< + * PyMem_Malloc(sizeof(cparser.http_parser_url)) + * if parsed is NULL: + */ + __pyx_v_parsed = ((struct http_parser_url *)PyMem_Malloc((sizeof(struct http_parser_url)))); + + /* "aiohttp/_http_parser.pyx":787 + * parsed = \ + * PyMem_Malloc(sizeof(cparser.http_parser_url)) + * if parsed is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * cparser.http_parser_url_init(parsed) + */ + __pyx_t_1 = ((__pyx_v_parsed == NULL) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_parser.pyx":788 + * PyMem_Malloc(sizeof(cparser.http_parser_url)) + * if parsed is NULL: + * raise MemoryError() # <<<<<<<<<<<<<< + * cparser.http_parser_url_init(parsed) + * try: + */ + PyErr_NoMemory(); __PYX_ERR(0, 788, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":787 + * parsed = \ + * PyMem_Malloc(sizeof(cparser.http_parser_url)) + * if parsed is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * cparser.http_parser_url_init(parsed) + */ + } + + /* "aiohttp/_http_parser.pyx":789 + * if parsed is NULL: + * raise MemoryError() + * cparser.http_parser_url_init(parsed) # <<<<<<<<<<<<<< + * try: + * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) + */ + http_parser_url_init(__pyx_v_parsed); + 
+ /* "aiohttp/_http_parser.pyx":790 + * raise MemoryError() + * cparser.http_parser_url_init(parsed) + * try: # <<<<<<<<<<<<<< + * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) + * + */ + /*try:*/ { + + /* "aiohttp/_http_parser.pyx":791 + * cparser.http_parser_url_init(parsed) + * try: + * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) # <<<<<<<<<<<<<< + * + * if res == 0: + */ + __pyx_v_res = http_parser_parse_url(__pyx_v_buf_data, __pyx_v_length, 0, __pyx_v_parsed); + + /* "aiohttp/_http_parser.pyx":793 + * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) + * + * if res == 0: # <<<<<<<<<<<<<< + * if parsed.field_set & (1 << cparser.UF_SCHEMA): + * off = parsed.field_data[cparser.UF_SCHEMA].off + */ + __pyx_t_1 = ((__pyx_v_res == 0) != 0); + if (likely(__pyx_t_1)) { + + /* "aiohttp/_http_parser.pyx":794 + * + * if res == 0: + * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_SCHEMA].off + * ln = parsed.field_data[cparser.UF_SCHEMA].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_SCHEMA)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":795 + * if res == 0: + * if parsed.field_set & (1 << cparser.UF_SCHEMA): + * off = parsed.field_data[cparser.UF_SCHEMA].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[cparser.UF_SCHEMA].len + * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":796 + * if parsed.field_set & (1 << cparser.UF_SCHEMA): + * off = parsed.field_data[cparser.UF_SCHEMA].off + * ln = parsed.field_data[cparser.UF_SCHEMA].len # <<<<<<<<<<<<<< + * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":797 + * off = parsed.field_data[cparser.UF_SCHEMA].off + * ln = parsed.field_data[cparser.UF_SCHEMA].len + * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * else: + * schema = '' + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 797, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_schema, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":794 + * + * if res == 0: + * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_SCHEMA].off + * ln = parsed.field_data[cparser.UF_SCHEMA].len + */ + goto __pyx_L8; + } + + /* "aiohttp/_http_parser.pyx":799 + * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + * schema = '' # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_HOST): + */ + /*else*/ { + __Pyx_INCREF(__pyx_kp_u__4); + __Pyx_DECREF_SET(__pyx_v_schema, __pyx_kp_u__4); + } + __pyx_L8:; + + /* "aiohttp/_http_parser.pyx":801 + * schema = '' + * + * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_HOST].off + * ln = parsed.field_data[cparser.UF_HOST].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_HOST)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":802 + * + * if parsed.field_set & (1 << cparser.UF_HOST): + * off = parsed.field_data[cparser.UF_HOST].off # <<<<<<<<<<<<<< 
+ * ln = parsed.field_data[cparser.UF_HOST].len + * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":803 + * if parsed.field_set & (1 << cparser.UF_HOST): + * off = parsed.field_data[cparser.UF_HOST].off + * ln = parsed.field_data[cparser.UF_HOST].len # <<<<<<<<<<<<<< + * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":804 + * off = parsed.field_data[cparser.UF_HOST].off + * ln = parsed.field_data[cparser.UF_HOST].len + * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * else: + * host = '' + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 804, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_host, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":801 + * schema = '' + * + * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_HOST].off + * ln = parsed.field_data[cparser.UF_HOST].len + */ + goto __pyx_L9; + } + + /* "aiohttp/_http_parser.pyx":806 + * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + * host = '' # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_PORT): + */ + /*else*/ { + __Pyx_INCREF(__pyx_kp_u__4); + __Pyx_DECREF_SET(__pyx_v_host, __pyx_kp_u__4); + } + __pyx_L9:; + + /* "aiohttp/_http_parser.pyx":808 + * host = '' + * + * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< + * port = parsed.port + * + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PORT)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":809 + * + * if parsed.field_set & (1 << cparser.UF_PORT): + * port = parsed.port # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_PATH): + */ + __pyx_t_3 = __Pyx_PyInt_From_uint16_t(__pyx_v_parsed->port); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 809, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_port, __pyx_t_3); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":808 + * host = '' + * + * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< + * port = parsed.port + * + */ + } + + /* "aiohttp/_http_parser.pyx":811 + * port = parsed.port + * + * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_PATH].off + * ln = parsed.field_data[cparser.UF_PATH].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PATH)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":812 + * + * if parsed.field_set & (1 << cparser.UF_PATH): + * off = parsed.field_data[cparser.UF_PATH].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[cparser.UF_PATH].len + * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":813 + * if parsed.field_set & (1 << cparser.UF_PATH): + * off = parsed.field_data[cparser.UF_PATH].off + * ln = parsed.field_data[cparser.UF_PATH].len # <<<<<<<<<<<<<< + * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + */ + __pyx_t_2 = 
(__pyx_v_parsed->field_data[((int)UF_PATH)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":814 + * off = parsed.field_data[cparser.UF_PATH].off + * ln = parsed.field_data[cparser.UF_PATH].len + * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * else: + * path = '' + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 814, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_path, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":811 + * port = parsed.port + * + * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_PATH].off + * ln = parsed.field_data[cparser.UF_PATH].len + */ + goto __pyx_L11; + } + + /* "aiohttp/_http_parser.pyx":816 + * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + * path = '' # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_QUERY): + */ + /*else*/ { + __Pyx_INCREF(__pyx_kp_u__4); + __Pyx_DECREF_SET(__pyx_v_path, __pyx_kp_u__4); + } + __pyx_L11:; + + /* "aiohttp/_http_parser.pyx":818 + * path = '' + * + * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_QUERY].off + * ln = parsed.field_data[cparser.UF_QUERY].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_QUERY)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":819 + * + * if parsed.field_set & (1 << cparser.UF_QUERY): + * off = parsed.field_data[cparser.UF_QUERY].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[cparser.UF_QUERY].len + * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":820 + * if parsed.field_set & (1 << cparser.UF_QUERY): + * off = parsed.field_data[cparser.UF_QUERY].off + * ln = parsed.field_data[cparser.UF_QUERY].len # <<<<<<<<<<<<<< + * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":821 + * off = parsed.field_data[cparser.UF_QUERY].off + * ln = parsed.field_data[cparser.UF_QUERY].len + * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * else: + * query = '' + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 821, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_query, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":818 + * path = '' + * + * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_QUERY].off + * ln = parsed.field_data[cparser.UF_QUERY].len + */ + goto __pyx_L12; + } + + /* "aiohttp/_http_parser.pyx":823 + * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + * query = '' # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_FRAGMENT): + */ + /*else*/ { + __Pyx_INCREF(__pyx_kp_u__4); + __Pyx_DECREF_SET(__pyx_v_query, __pyx_kp_u__4); + } + __pyx_L12:; + + /* "aiohttp/_http_parser.pyx":825 + * query = '' + * + * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # 
<<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_FRAGMENT].off + * ln = parsed.field_data[cparser.UF_FRAGMENT].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_FRAGMENT)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":826 + * + * if parsed.field_set & (1 << cparser.UF_FRAGMENT): + * off = parsed.field_data[cparser.UF_FRAGMENT].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[cparser.UF_FRAGMENT].len + * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":827 + * if parsed.field_set & (1 << cparser.UF_FRAGMENT): + * off = parsed.field_data[cparser.UF_FRAGMENT].off + * ln = parsed.field_data[cparser.UF_FRAGMENT].len # <<<<<<<<<<<<<< + * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":828 + * off = parsed.field_data[cparser.UF_FRAGMENT].off + * ln = parsed.field_data[cparser.UF_FRAGMENT].len + * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * else: + * fragment = '' + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 828, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_fragment, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":825 + * query = '' + * + * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_FRAGMENT].off + * ln = parsed.field_data[cparser.UF_FRAGMENT].len + */ + goto __pyx_L13; + } + + /* "aiohttp/_http_parser.pyx":830 + * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * else: + * fragment = '' # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << cparser.UF_USERINFO): + */ + /*else*/ { + __Pyx_INCREF(__pyx_kp_u__4); + __Pyx_DECREF_SET(__pyx_v_fragment, __pyx_kp_u__4); + } + __pyx_L13:; + + /* "aiohttp/_http_parser.pyx":832 + * fragment = '' + * + * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_USERINFO].off + * ln = parsed.field_data[cparser.UF_USERINFO].len + */ + __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_USERINFO)) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_parser.pyx":833 + * + * if parsed.field_set & (1 << cparser.UF_USERINFO): + * off = parsed.field_data[cparser.UF_USERINFO].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[cparser.UF_USERINFO].len + * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).off; + __pyx_v_off = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":834 + * if parsed.field_set & (1 << cparser.UF_USERINFO): + * off = parsed.field_data[cparser.UF_USERINFO].off + * ln = parsed.field_data[cparser.UF_USERINFO].len # <<<<<<<<<<<<<< + * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * + */ + __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).len; + __pyx_v_ln = __pyx_t_2; + + /* "aiohttp/_http_parser.pyx":835 + * off = parsed.field_data[cparser.UF_USERINFO].off + * ln = parsed.field_data[cparser.UF_USERINFO].len + * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< + * + * user, sep, password = 
userinfo.partition(':') + */ + __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 835, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF_SET(__pyx_v_userinfo, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "aiohttp/_http_parser.pyx":837 + * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + * + * user, sep, password = userinfo.partition(':') # <<<<<<<<<<<<<< + * + * return URL_build(scheme=schema, + */ + __pyx_t_3 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyUnicode_Type_partition, __pyx_v_userinfo, __pyx_kp_u__11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 837, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { + PyObject* sequence = __pyx_t_3; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 837, __pyx_L5_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + __pyx_t_6 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 837, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 837, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 837, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_7 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 837, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_4)) goto __pyx_L15_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_5 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_5)) goto __pyx_L15_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + index = 2; __pyx_t_6 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L15_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 3) < 0) __PYX_ERR(0, 837, __pyx_L5_error) + __pyx_t_8 = NULL; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L16_unpacking_done; + __pyx_L15_unpacking_failed:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_8 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 837, __pyx_L5_error) + __pyx_L16_unpacking_done:; + } + if (!(likely(PyUnicode_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 837, __pyx_L5_error) + if (!(likely(PyUnicode_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 837, 
__pyx_L5_error) + __Pyx_DECREF_SET(__pyx_v_user, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + __pyx_v_sep = __pyx_t_5; + __pyx_t_5 = 0; + __Pyx_DECREF_SET(__pyx_v_password, ((PyObject*)__pyx_t_6)); + __pyx_t_6 = 0; + + /* "aiohttp/_http_parser.pyx":832 + * fragment = '' + * + * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< + * off = parsed.field_data[cparser.UF_USERINFO].off + * ln = parsed.field_data[cparser.UF_USERINFO].len + */ + } + + /* "aiohttp/_http_parser.pyx":839 + * user, sep, password = userinfo.partition(':') + * + * return URL_build(scheme=schema, # <<<<<<<<<<<<<< + * user=user, password=password, host=host, port=port, + * path=path, query=query, fragment=fragment) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyDict_NewPresized(8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 839, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_scheme, __pyx_v_schema) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + + /* "aiohttp/_http_parser.pyx":840 + * + * return URL_build(scheme=schema, + * user=user, password=password, host=host, port=port, # <<<<<<<<<<<<<< + * path=path, query=query, fragment=fragment) + * else: + */ + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_user, __pyx_v_user) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_password, __pyx_v_password) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_host, __pyx_v_host) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_port, __pyx_v_port) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + + /* "aiohttp/_http_parser.pyx":841 + * return URL_build(scheme=schema, + * user=user, password=password, host=host, port=port, + * path=path, query=query, fragment=fragment) # <<<<<<<<<<<<<< + * else: + * raise InvalidURLError("invalid url {!r}".format(buf_data)) + */ + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_path, __pyx_v_path) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_query, __pyx_v_query) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_fragment, __pyx_v_fragment) < 0) __PYX_ERR(0, 839, __pyx_L5_error) + + /* "aiohttp/_http_parser.pyx":839 + * user, sep, password = userinfo.partition(':') + * + * return URL_build(scheme=schema, # <<<<<<<<<<<<<< + * user=user, password=password, host=host, port=port, + * path=path, query=query, fragment=fragment) + */ + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_empty_tuple, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 839, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L4_return; + + /* "aiohttp/_http_parser.pyx":793 + * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) + * + * if res == 0: # <<<<<<<<<<<<<< + * if parsed.field_set & (1 << cparser.UF_SCHEMA): + * off = parsed.field_data[cparser.UF_SCHEMA].off + */ + } + + /* "aiohttp/_http_parser.pyx":843 + * path=path, query=query, fragment=fragment) + * else: + * raise InvalidURLError("invalid url {!r}".format(buf_data)) # <<<<<<<<<<<<<< + * finally: + * PyMem_Free(parsed) + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 843, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_invalid_url_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 843, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_4); + 
__pyx_t_7 = __Pyx_PyBytes_FromString(__pyx_v_buf_data); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 843, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_5 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 843, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_6 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 843, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_6, 0, 0, 0); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __PYX_ERR(0, 843, __pyx_L5_error) + } + } + + /* "aiohttp/_http_parser.pyx":845 + * raise InvalidURLError("invalid url {!r}".format(buf_data)) + * finally: + * PyMem_Free(parsed) # <<<<<<<<<<<<<< + */ + /*finally:*/ { + __pyx_L5_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15) < 0)) __Pyx_ErrFetch(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + __pyx_t_10 = __pyx_lineno; __pyx_t_11 = __pyx_clineno; __pyx_t_12 = __pyx_filename; + { + PyMem_Free(__pyx_v_parsed); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + } + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ErrRestore(__pyx_t_13, __pyx_t_14, __pyx_t_15); + __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; + __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_11; __pyx_filename = __pyx_t_12; + goto __pyx_L1_error; + } + __pyx_L4_return: { + __pyx_t_18 = __pyx_r; + __pyx_r = 0; + PyMem_Free(__pyx_v_parsed); + __pyx_r = __pyx_t_18; + __pyx_t_18 = 0; + goto __pyx_L0; + } + } + + /* "aiohttp/_http_parser.pyx":768 + * + * + * cdef 
_parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< + * cdef: + * cparser.http_parser_url* parsed + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("aiohttp._http_parser._parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_schema); + __Pyx_XDECREF(__pyx_v_host); + __Pyx_XDECREF(__pyx_v_port); + __Pyx_XDECREF(__pyx_v_path); + __Pyx_XDECREF(__pyx_v_query); + __Pyx_XDECREF(__pyx_v_fragment); + __Pyx_XDECREF(__pyx_v_user); + __Pyx_XDECREF(__pyx_v_password); + __Pyx_XDECREF(__pyx_v_userinfo); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XDECREF(__pyx_v_sep); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage = {"__pyx_unpickle_RawRequestMessage", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_RawRequestMessage") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] 
= PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_2__pyx_unpickle_RawRequestMessage(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_2__pyx_unpickle_RawRequestMessage(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x1408252: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x1408252) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum != 0x1408252: + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + * __pyx_result = RawRequestMessage.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v___pyx_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum != 0x1408252: + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) # 
<<<<<<<<<<<<<< + * __pyx_result = RawRequestMessage.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x14, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0x1408252: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + * __pyx_result = RawRequestMessage.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v___pyx_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + * __pyx_result = RawRequestMessage.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_6 = (__pyx_t_1 != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":9 + * __pyx_result = RawRequestMessage.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) + * __pyx_result = RawRequestMessage.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + 
__Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[10]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->chunked); + __Pyx_DECREF(__pyx_v___pyx_result->chunked); + __pyx_v___pyx_result->chunked = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->compression); + __Pyx_DECREF(__pyx_v___pyx_result->compression); + __pyx_v___pyx_result->compression = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->headers); + __Pyx_DECREF(__pyx_v___pyx_result->headers); + __pyx_v___pyx_result->headers = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->method); + __Pyx_DECREF(__pyx_v___pyx_result->method); + __pyx_v___pyx_result->method = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->path); + __Pyx_DECREF(__pyx_v___pyx_result->path); + __pyx_v___pyx_result->path = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->raw_headers); + __Pyx_DECREF(__pyx_v___pyx_result->raw_headers); + __pyx_v___pyx_result->raw_headers = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->should_close); + __Pyx_DECREF(__pyx_v___pyx_result->should_close); + __pyx_v___pyx_result->should_close = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->upgrade); + __Pyx_DECREF(__pyx_v___pyx_result->upgrade); + __pyx_v___pyx_result->upgrade = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + 
__pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->url); + __Pyx_DECREF(__pyx_v___pyx_result->url); + __pyx_v___pyx_result->url = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->version); + __Pyx_DECREF(__pyx_v___pyx_result->version); + __pyx_v___pyx_result->version = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[10]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_3 > 10) != 0); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + __pyx_t_2 = __pyx_t_5; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[10]) # <<<<<<<<<<<<<< + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 10, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[10]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_RawResponseMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage = {"__pyx_unpickle_RawResponseMessage", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_RawResponseMessage") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_12_http_parser_4__pyx_unpickle_RawResponseMessage(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_parser_4__pyx_unpickle_RawResponseMessage(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + __Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0xc7706dc: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise 
__pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xc7706dc) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum != 0xc7706dc: + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + * __pyx_result = RawResponseMessage.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v___pyx_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum != 0xc7706dc: + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) # <<<<<<<<<<<<<< + * __pyx_result = RawResponseMessage.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xc7, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v___pyx_PickleError); + __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum != 0xc7706dc: # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + * __pyx_result = RawResponseMessage.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v___pyx_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + * __pyx_result = RawResponseMessage.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_6 = (__pyx_t_1 != 0); + if (__pyx_t_6) { + + /* "(tree fragment)":9 + * __pyx_result = RawResponseMessage.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) + * __pyx_result = RawResponseMessage.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_RawResponseMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); 
+ __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage__set_state", 0); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[9]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->chunked); + __Pyx_DECREF(__pyx_v___pyx_result->chunked); + __pyx_v___pyx_result->chunked = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v___pyx_result->code = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + 
__Pyx_GOTREF(__pyx_v___pyx_result->compression); + __Pyx_DECREF(__pyx_v___pyx_result->compression); + __pyx_v___pyx_result->compression = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->headers); + __Pyx_DECREF(__pyx_v___pyx_result->headers); + __pyx_v___pyx_result->headers = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->raw_headers); + __Pyx_DECREF(__pyx_v___pyx_result->raw_headers); + __pyx_v___pyx_result->raw_headers = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->reason); + __Pyx_DECREF(__pyx_v___pyx_result->reason); + __pyx_v___pyx_result->reason = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->should_close); + __Pyx_DECREF(__pyx_v___pyx_result->should_close); + __pyx_v___pyx_result->should_close = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->upgrade); + __Pyx_DECREF(__pyx_v___pyx_result->upgrade); + __pyx_v___pyx_result->upgrade = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->version); + 
__Pyx_DECREF(__pyx_v___pyx_result->version); + __pyx_v___pyx_result->version = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[9]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_5 = ((__pyx_t_4 > 9) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_6; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* "(tree fragment)":14 + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[9]) # <<<<<<<<<<<<<< + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[9]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] + * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[250]; +static int __pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p; + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + } + p = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o); + p->method = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->path = ((PyObject*)Py_None); Py_INCREF(Py_None); + 
p->version = Py_None; Py_INCREF(Py_None); + p->headers = Py_None; Py_INCREF(Py_None); + p->raw_headers = Py_None; Py_INCREF(Py_None); + p->should_close = Py_None; Py_INCREF(Py_None); + p->compression = Py_None; Py_INCREF(Py_None); + p->upgrade = Py_None; Py_INCREF(Py_None); + p->chunked = Py_None; Py_INCREF(Py_None); + p->url = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->method); + Py_CLEAR(p->path); + Py_CLEAR(p->version); + Py_CLEAR(p->headers); + Py_CLEAR(p->raw_headers); + Py_CLEAR(p->should_close); + Py_CLEAR(p->compression); + Py_CLEAR(p->upgrade); + Py_CLEAR(p->chunked); + Py_CLEAR(p->url); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + __pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; + if (p->version) { + e = (*v)(p->version, a); if (e) return e; + } + if (p->headers) { + e = (*v)(p->headers, a); if (e) return e; + } + if (p->raw_headers) { + e = (*v)(p->raw_headers, a); if (e) return e; + } + if (p->should_close) { + e = (*v)(p->should_close, a); if (e) return e; + } + if (p->compression) { + e = (*v)(p->compression, a); if (e) return e; + } + if (p->upgrade) { + e = (*v)(p->upgrade, a); if (e) return e; + } + if (p->chunked) { + e = (*v)(p->chunked, a); if (e) return e; + } + if (p->url) { + e = (*v)(p->url, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; + tmp = ((PyObject*)p->version); + p->version = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->headers); + p->headers = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->raw_headers); + p->raw_headers = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->should_close); + p->should_close = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->compression); + p->compression = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->upgrade); + p->upgrade = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->chunked); + p->chunked = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->url); + p->url = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject 
*__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_method(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_path(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_version(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_headers(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_raw_headers(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_should_close(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_compression(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_upgrade(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_chunked(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_url(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(o); +} + +static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_RawRequestMessage[] = { + {"_replace", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace, METH_VARARGS|METH_KEYWORDS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_7aiohttp_12_http_parser_RawRequestMessage[] = { + {(char *)"method", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_method, 0, (char *)0, 0}, + {(char *)"path", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_path, 0, (char *)0, 0}, + {(char *)"version", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_version, 0, (char *)0, 0}, + {(char *)"headers", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_headers, 0, (char *)0, 0}, + {(char *)"raw_headers", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_raw_headers, 0, (char *)0, 0}, + {(char *)"should_close", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_should_close, 0, (char *)0, 0}, + {(char *)"compression", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_compression, 0, (char *)0, 0}, + {(char *)"upgrade", 
__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_upgrade, 0, (char *)0, 0}, + {(char *)"chunked", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_chunked, 0, (char *)0, 0}, + {(char *)"url", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_url, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser_RawRequestMessage = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.RawRequestMessage", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser_RawRequestMessage, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser_RawRequestMessage, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser_RawRequestMessage, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_12_http_parser_RawRequestMessage, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_7aiohttp_12_http_parser_RawRequestMessage, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[250]; +static int __pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p; + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + } + p = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o); + p->version = Py_None; Py_INCREF(Py_None); + p->reason = ((PyObject*)Py_None); 
Py_INCREF(Py_None); + p->headers = Py_None; Py_INCREF(Py_None); + p->raw_headers = Py_None; Py_INCREF(Py_None); + p->should_close = Py_None; Py_INCREF(Py_None); + p->compression = Py_None; Py_INCREF(Py_None); + p->upgrade = Py_None; Py_INCREF(Py_None); + p->chunked = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->version); + Py_CLEAR(p->reason); + Py_CLEAR(p->headers); + Py_CLEAR(p->raw_headers); + Py_CLEAR(p->should_close); + Py_CLEAR(p->compression); + Py_CLEAR(p->upgrade); + Py_CLEAR(p->chunked); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + __pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; + if (p->version) { + e = (*v)(p->version, a); if (e) return e; + } + if (p->headers) { + e = (*v)(p->headers, a); if (e) return e; + } + if (p->raw_headers) { + e = (*v)(p->raw_headers, a); if (e) return e; + } + if (p->should_close) { + e = (*v)(p->should_close, a); if (e) return e; + } + if (p->compression) { + e = (*v)(p->compression, a); if (e) return e; + } + if (p->upgrade) { + e = (*v)(p->upgrade, a); if (e) return e; + } + if (p->chunked) { + e = (*v)(p->chunked, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; + tmp = ((PyObject*)p->version); + p->version = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->headers); + p->headers = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->raw_headers); + p->raw_headers = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->should_close); + p->should_close = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->compression); + p->compression = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->upgrade); + p->upgrade = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->chunked); + p->chunked = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_version(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_code(PyObject *o, 
CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_reason(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_headers(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_raw_headers(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_should_close(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_compression(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_upgrade(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(o); +} + +static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_chunked(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(o); +} + +static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_RawResponseMessage[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_7aiohttp_12_http_parser_RawResponseMessage[] = { + {(char *)"version", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_version, 0, (char *)0, 0}, + {(char *)"code", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_code, 0, (char *)0, 0}, + {(char *)"reason", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_reason, 0, (char *)0, 0}, + {(char *)"headers", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_headers, 0, (char *)0, 0}, + {(char *)"raw_headers", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_raw_headers, 0, (char *)0, 0}, + {(char *)"should_close", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_should_close, 0, (char *)0, 0}, + {(char *)"compression", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_compression, 0, (char *)0, 0}, + {(char *)"upgrade", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_upgrade, 0, (char *)0, 0}, + {(char *)"chunked", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_chunked, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser_RawResponseMessage = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.RawResponseMessage", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser_RawResponseMessage, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + 
#endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser_RawResponseMessage, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser_RawResponseMessage, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_12_http_parser_RawResponseMessage, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_7aiohttp_12_http_parser_RawResponseMessage, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_vtable_7aiohttp_12_http_parser_HttpParser; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o); + p->__pyx_vtab = __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; + p->_raw_name = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_raw_value = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_protocol = Py_None; Py_INCREF(Py_None); + p->_loop = Py_None; Py_INCREF(Py_None); + p->_timer = Py_None; Py_INCREF(Py_None); + p->_url = Py_None; Py_INCREF(Py_None); + p->_buf = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_path = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_reason = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_headers = Py_None; Py_INCREF(Py_None); + p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_payload = Py_None; Py_INCREF(Py_None); + p->_payload_exception = Py_None; Py_INCREF(Py_None); + p->_last_error = Py_None; Py_INCREF(Py_None); + p->_content_encoding = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->py_buf.obj = NULL; + if (unlikely(__pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + 
PyObject_GC_UnTrack(o); + { + PyObject *etype, *eval, *etb; + PyErr_Fetch(&etype, &eval, &etb); + ++Py_REFCNT(o); + __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(o); + --Py_REFCNT(o); + PyErr_Restore(etype, eval, etb); + } + Py_CLEAR(p->_raw_name); + Py_CLEAR(p->_raw_value); + Py_CLEAR(p->_protocol); + Py_CLEAR(p->_loop); + Py_CLEAR(p->_timer); + Py_CLEAR(p->_url); + Py_CLEAR(p->_buf); + Py_CLEAR(p->_path); + Py_CLEAR(p->_reason); + Py_CLEAR(p->_headers); + Py_CLEAR(p->_raw_headers); + Py_CLEAR(p->_messages); + Py_CLEAR(p->_payload); + Py_CLEAR(p->_payload_exception); + Py_CLEAR(p->_last_error); + Py_CLEAR(p->_content_encoding); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; + if (p->_protocol) { + e = (*v)(p->_protocol, a); if (e) return e; + } + if (p->_loop) { + e = (*v)(p->_loop, a); if (e) return e; + } + if (p->_timer) { + e = (*v)(p->_timer, a); if (e) return e; + } + if (p->_url) { + e = (*v)(p->_url, a); if (e) return e; + } + if (p->_headers) { + e = (*v)(p->_headers, a); if (e) return e; + } + if (p->_raw_headers) { + e = (*v)(p->_raw_headers, a); if (e) return e; + } + if (p->_messages) { + e = (*v)(p->_messages, a); if (e) return e; + } + if (p->_payload) { + e = (*v)(p->_payload, a); if (e) return e; + } + if (p->_payload_exception) { + e = (*v)(p->_payload_exception, a); if (e) return e; + } + if (p->_last_error) { + e = (*v)(p->_last_error, a); if (e) return e; + } + if (p->py_buf.obj) { + e = (*v)(p->py_buf.obj, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; + tmp = ((PyObject*)p->_protocol); + p->_protocol = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_loop); + p->_loop = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_timer); + p->_timer = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_url); + p->_url = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_headers); + p->_headers = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_raw_headers); + p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_messages); + p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_payload); + p->_payload = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_payload_exception); + p->_payload_exception = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_last_error); + p->_last_error = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + Py_CLEAR(p->py_buf.obj); + return 0; +} + +static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpParser[] = { + {"feed_eof", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof, METH_NOARGS, 0}, + {"feed_data", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static 
PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpParser = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.HttpParser", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_12_http_parser_HttpParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser_HttpParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParser; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *p; + PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser; + return o; +} + +static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpRequestParser[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpRequestParser = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.HttpRequestParser", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + 
__pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_12_http_parser_HttpRequestParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParser; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *p; + PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser; + return o; +} + +static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpResponseParser[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpResponseParser = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.HttpResponseParser", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_7aiohttp_12_http_parser_HttpResponseParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 
0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[8]; +static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + } + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; + PyObject_GC_UnTrack(o); + Py_CLEAR(p->__pyx_v_info); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)))) { + __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; + if (p->__pyx_v_info) { + e = (*v)(p->__pyx_v_info, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; + tmp = ((PyObject*)p->__pyx_v_info); + p->__pyx_v_info = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.__pyx_scope_struct____repr__", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, 
/*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[8]; +static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + } + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o; + PyObject_GC_UnTrack(o); + Py_CLEAR(p->__pyx_outer_scope); + Py_CLEAR(p->__pyx_v_name); + Py_CLEAR(p->__pyx_v_val); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { + __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o; + if (p->__pyx_outer_scope) { + e = (*v)(((PyObject *)p->__pyx_outer_scope), a); if (e) return e; + } + if (p->__pyx_v_name) { + e = (*v)(p->__pyx_v_name, a); if (e) return e; + } + if (p->__pyx_v_val) { + e = (*v)(p->__pyx_v_val, a); if (e) return e; + } + return 0; +} + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.__pyx_scope_struct_1_genexpr", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr), /*tp_basicsize*/ + 0, 
/*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[8]; +static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + } + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; + PyObject_GC_UnTrack(o); + Py_CLEAR(p->__pyx_v_info); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)))) { + __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; + if (p->__pyx_v_info) { + e = (*v)(p->__pyx_v_info, a); if (e) return e; + } + 
return 0; +} + +static int __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; + tmp = ((PyObject*)p->__pyx_v_info); + p->__pyx_v_info = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.__pyx_scope_struct_2___repr__", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_traverse*/ + __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[8]; +static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = 0; + +static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)))) { + o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr]; + memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + } + return o; +} + +static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyObject *o) { + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o; + PyObject_GC_UnTrack(o); + Py_CLEAR(p->__pyx_outer_scope); + Py_CLEAR(p->__pyx_v_name); + Py_CLEAR(p->__pyx_v_val); + if (CYTHON_COMPILING_IN_CPYTHON && 
((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)))) { + __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o; + if (p->__pyx_outer_scope) { + e = (*v)(((PyObject *)p->__pyx_outer_scope), a); if (e) return e; + } + if (p->__pyx_v_name) { + e = (*v)(p->__pyx_v_name, a); if (e) return e; + } + if (p->__pyx_v_val) { + e = (*v)(p->__pyx_v_val, a); if (e) return e; + } + return 0; +} + +static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = { + PyVarObject_HEAD_INIT(0, 0) + "aiohttp._http_parser.__pyx_scope_struct_3_genexpr", /*tp_name*/ + sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__http_parser(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__http_parser}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "_http_parser", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE 
+#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_ACCEPT, __pyx_k_ACCEPT, sizeof(__pyx_k_ACCEPT), 0, 0, 1, 1}, + {&__pyx_n_s_ACCEPT_CHARSET, __pyx_k_ACCEPT_CHARSET, sizeof(__pyx_k_ACCEPT_CHARSET), 0, 0, 1, 1}, + {&__pyx_n_s_ACCEPT_ENCODING, __pyx_k_ACCEPT_ENCODING, sizeof(__pyx_k_ACCEPT_ENCODING), 0, 0, 1, 1}, + {&__pyx_n_s_ACCEPT_LANGUAGE, __pyx_k_ACCEPT_LANGUAGE, sizeof(__pyx_k_ACCEPT_LANGUAGE), 0, 0, 1, 1}, + {&__pyx_n_s_ACCEPT_RANGES, __pyx_k_ACCEPT_RANGES, sizeof(__pyx_k_ACCEPT_RANGES), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_ALLOW_CREDENTIALS, __pyx_k_ACCESS_CONTROL_ALLOW_CREDENTIALS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_CREDENTIALS), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_ALLOW_HEADERS, __pyx_k_ACCESS_CONTROL_ALLOW_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_HEADERS), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_ALLOW_METHODS, __pyx_k_ACCESS_CONTROL_ALLOW_METHODS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_METHODS), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_ALLOW_ORIGIN, __pyx_k_ACCESS_CONTROL_ALLOW_ORIGIN, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_ORIGIN), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_EXPOSE_HEADERS, __pyx_k_ACCESS_CONTROL_EXPOSE_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_EXPOSE_HEADERS), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_MAX_AGE, __pyx_k_ACCESS_CONTROL_MAX_AGE, sizeof(__pyx_k_ACCESS_CONTROL_MAX_AGE), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_REQUEST_HEADERS, __pyx_k_ACCESS_CONTROL_REQUEST_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_REQUEST_HEADERS), 0, 0, 1, 1}, + {&__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD, __pyx_k_ACCESS_CONTROL_REQUEST_METHOD, sizeof(__pyx_k_ACCESS_CONTROL_REQUEST_METHOD), 0, 0, 1, 1}, + {&__pyx_n_s_AGE, __pyx_k_AGE, sizeof(__pyx_k_AGE), 0, 0, 1, 1}, + {&__pyx_n_s_ALLOW, __pyx_k_ALLOW, sizeof(__pyx_k_ALLOW), 0, 0, 1, 1}, + {&__pyx_n_s_AUTHORIZATION, __pyx_k_AUTHORIZATION, sizeof(__pyx_k_AUTHORIZATION), 0, 0, 1, 1}, + {&__pyx_n_s_BadHttpMessage, __pyx_k_BadHttpMessage, sizeof(__pyx_k_BadHttpMessage), 0, 0, 1, 1}, + {&__pyx_n_s_BadStatusLine, __pyx_k_BadStatusLine, sizeof(__pyx_k_BadStatusLine), 0, 0, 1, 1}, + {&__pyx_n_s_BaseException, __pyx_k_BaseException, sizeof(__pyx_k_BaseException), 0, 0, 1, 1}, + {&__pyx_n_s_CACHE_CONTROL, __pyx_k_CACHE_CONTROL, sizeof(__pyx_k_CACHE_CONTROL), 0, 0, 1, 1}, + {&__pyx_n_s_CIMultiDict, __pyx_k_CIMultiDict, sizeof(__pyx_k_CIMultiDict), 0, 0, 1, 1}, + {&__pyx_n_s_CIMultiDictProxy, __pyx_k_CIMultiDictProxy, sizeof(__pyx_k_CIMultiDictProxy), 0, 0, 1, 1}, + {&__pyx_n_s_CIMultiDictProxy_2, __pyx_k_CIMultiDictProxy_2, sizeof(__pyx_k_CIMultiDictProxy_2), 0, 0, 1, 1}, + {&__pyx_n_s_CIMultiDict_2, __pyx_k_CIMultiDict_2, sizeof(__pyx_k_CIMultiDict_2), 0, 0, 1, 1}, + {&__pyx_n_s_CONNECTION, __pyx_k_CONNECTION, sizeof(__pyx_k_CONNECTION), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_DISPOSITION, __pyx_k_CONTENT_DISPOSITION, sizeof(__pyx_k_CONTENT_DISPOSITION), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_ENCODING, __pyx_k_CONTENT_ENCODING, sizeof(__pyx_k_CONTENT_ENCODING), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_LANGUAGE, __pyx_k_CONTENT_LANGUAGE, sizeof(__pyx_k_CONTENT_LANGUAGE), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_LENGTH, __pyx_k_CONTENT_LENGTH, sizeof(__pyx_k_CONTENT_LENGTH), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_LOCATION, __pyx_k_CONTENT_LOCATION, 
sizeof(__pyx_k_CONTENT_LOCATION), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_MD5, __pyx_k_CONTENT_MD5, sizeof(__pyx_k_CONTENT_MD5), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_RANGE, __pyx_k_CONTENT_RANGE, sizeof(__pyx_k_CONTENT_RANGE), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_TRANSFER_ENCODING, __pyx_k_CONTENT_TRANSFER_ENCODING, sizeof(__pyx_k_CONTENT_TRANSFER_ENCODING), 0, 0, 1, 1}, + {&__pyx_n_s_CONTENT_TYPE, __pyx_k_CONTENT_TYPE, sizeof(__pyx_k_CONTENT_TYPE), 0, 0, 1, 1}, + {&__pyx_n_s_COOKIE, __pyx_k_COOKIE, sizeof(__pyx_k_COOKIE), 0, 0, 1, 1}, + {&__pyx_n_s_ContentLengthError, __pyx_k_ContentLengthError, sizeof(__pyx_k_ContentLengthError), 0, 0, 1, 1}, + {&__pyx_n_s_DATE, __pyx_k_DATE, sizeof(__pyx_k_DATE), 0, 0, 1, 1}, + {&__pyx_n_s_DESTINATION, __pyx_k_DESTINATION, sizeof(__pyx_k_DESTINATION), 0, 0, 1, 1}, + {&__pyx_n_s_DIGEST, __pyx_k_DIGEST, sizeof(__pyx_k_DIGEST), 0, 0, 1, 1}, + {&__pyx_n_s_DeflateBuffer, __pyx_k_DeflateBuffer, sizeof(__pyx_k_DeflateBuffer), 0, 0, 1, 1}, + {&__pyx_n_s_DeflateBuffer_2, __pyx_k_DeflateBuffer_2, sizeof(__pyx_k_DeflateBuffer_2), 0, 0, 1, 1}, + {&__pyx_n_s_EMPTY_PAYLOAD, __pyx_k_EMPTY_PAYLOAD, sizeof(__pyx_k_EMPTY_PAYLOAD), 0, 0, 1, 1}, + {&__pyx_n_s_EMPTY_PAYLOAD_2, __pyx_k_EMPTY_PAYLOAD_2, sizeof(__pyx_k_EMPTY_PAYLOAD_2), 0, 0, 1, 1}, + {&__pyx_n_s_ETAG, __pyx_k_ETAG, sizeof(__pyx_k_ETAG), 0, 0, 1, 1}, + {&__pyx_n_s_EXPECT, __pyx_k_EXPECT, sizeof(__pyx_k_EXPECT), 0, 0, 1, 1}, + {&__pyx_n_s_EXPIRES, __pyx_k_EXPIRES, sizeof(__pyx_k_EXPIRES), 0, 0, 1, 1}, + {&__pyx_n_s_FORWARDED, __pyx_k_FORWARDED, sizeof(__pyx_k_FORWARDED), 0, 0, 1, 1}, + {&__pyx_n_s_FROM, __pyx_k_FROM, sizeof(__pyx_k_FROM), 0, 0, 1, 1}, + {&__pyx_n_s_HOST, __pyx_k_HOST, sizeof(__pyx_k_HOST), 0, 0, 1, 1}, + {&__pyx_kp_u_Header_name_is_too_long, __pyx_k_Header_name_is_too_long, sizeof(__pyx_k_Header_name_is_too_long), 0, 1, 0, 0}, + {&__pyx_kp_u_Header_value_is_too_long, __pyx_k_Header_value_is_too_long, sizeof(__pyx_k_Header_value_is_too_long), 0, 1, 0, 0}, + {&__pyx_n_s_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 0, 1, 1}, + {&__pyx_n_u_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 1, 0, 1}, + {&__pyx_n_s_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 0, 1, 1}, + {&__pyx_n_u_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 1, 0, 1}, + {&__pyx_n_s_HttpVersion, __pyx_k_HttpVersion, sizeof(__pyx_k_HttpVersion), 0, 0, 1, 1}, + {&__pyx_n_s_HttpVersion10, __pyx_k_HttpVersion10, sizeof(__pyx_k_HttpVersion10), 0, 0, 1, 1}, + {&__pyx_n_s_HttpVersion10_2, __pyx_k_HttpVersion10_2, sizeof(__pyx_k_HttpVersion10_2), 0, 0, 1, 1}, + {&__pyx_n_s_HttpVersion11, __pyx_k_HttpVersion11, sizeof(__pyx_k_HttpVersion11), 0, 0, 1, 1}, + {&__pyx_n_s_HttpVersion11_2, __pyx_k_HttpVersion11_2, sizeof(__pyx_k_HttpVersion11_2), 0, 0, 1, 1}, + {&__pyx_n_s_HttpVersion_2, __pyx_k_HttpVersion_2, sizeof(__pyx_k_HttpVersion_2), 0, 0, 1, 1}, + {&__pyx_n_s_IF_MATCH, __pyx_k_IF_MATCH, sizeof(__pyx_k_IF_MATCH), 0, 0, 1, 1}, + {&__pyx_n_s_IF_MODIFIED_SINCE, __pyx_k_IF_MODIFIED_SINCE, sizeof(__pyx_k_IF_MODIFIED_SINCE), 0, 0, 1, 1}, + {&__pyx_n_s_IF_NONE_MATCH, __pyx_k_IF_NONE_MATCH, sizeof(__pyx_k_IF_NONE_MATCH), 0, 0, 1, 1}, + {&__pyx_n_s_IF_RANGE, __pyx_k_IF_RANGE, sizeof(__pyx_k_IF_RANGE), 0, 0, 1, 1}, + {&__pyx_n_s_IF_UNMODIFIED_SINCE, __pyx_k_IF_UNMODIFIED_SINCE, sizeof(__pyx_k_IF_UNMODIFIED_SINCE), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0x14, 
__pyx_k_Incompatible_checksums_s_vs_0x14, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x14), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0xc7, __pyx_k_Incompatible_checksums_s_vs_0xc7, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xc7), 0, 0, 1, 0}, + {&__pyx_n_s_InvalidHeader, __pyx_k_InvalidHeader, sizeof(__pyx_k_InvalidHeader), 0, 0, 1, 1}, + {&__pyx_n_s_InvalidURLError, __pyx_k_InvalidURLError, sizeof(__pyx_k_InvalidURLError), 0, 0, 1, 1}, + {&__pyx_n_s_KEEP_ALIVE, __pyx_k_KEEP_ALIVE, sizeof(__pyx_k_KEEP_ALIVE), 0, 0, 1, 1}, + {&__pyx_n_s_LAST_EVENT_ID, __pyx_k_LAST_EVENT_ID, sizeof(__pyx_k_LAST_EVENT_ID), 0, 0, 1, 1}, + {&__pyx_n_s_LAST_MODIFIED, __pyx_k_LAST_MODIFIED, sizeof(__pyx_k_LAST_MODIFIED), 0, 0, 1, 1}, + {&__pyx_n_s_LINK, __pyx_k_LINK, sizeof(__pyx_k_LINK), 0, 0, 1, 1}, + {&__pyx_n_s_LOCATION, __pyx_k_LOCATION, sizeof(__pyx_k_LOCATION), 0, 0, 1, 1}, + {&__pyx_n_s_LineTooLong, __pyx_k_LineTooLong, sizeof(__pyx_k_LineTooLong), 0, 0, 1, 1}, + {&__pyx_n_s_MAX_FORWARDS, __pyx_k_MAX_FORWARDS, sizeof(__pyx_k_MAX_FORWARDS), 0, 0, 1, 1}, + {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1}, + {&__pyx_kp_u_Not_enough_data_for_satisfy_cont, __pyx_k_Not_enough_data_for_satisfy_cont, sizeof(__pyx_k_Not_enough_data_for_satisfy_cont), 0, 1, 0, 0}, + {&__pyx_kp_u_Not_enough_data_for_satisfy_tran, __pyx_k_Not_enough_data_for_satisfy_tran, sizeof(__pyx_k_Not_enough_data_for_satisfy_tran), 0, 1, 0, 0}, + {&__pyx_n_s_ORIGIN, __pyx_k_ORIGIN, sizeof(__pyx_k_ORIGIN), 0, 0, 1, 1}, + {&__pyx_n_s_PRAGMA, __pyx_k_PRAGMA, sizeof(__pyx_k_PRAGMA), 0, 0, 1, 1}, + {&__pyx_n_s_PROXY_AUTHENTICATE, __pyx_k_PROXY_AUTHENTICATE, sizeof(__pyx_k_PROXY_AUTHENTICATE), 0, 0, 1, 1}, + {&__pyx_n_s_PROXY_AUTHORIZATION, __pyx_k_PROXY_AUTHORIZATION, sizeof(__pyx_k_PROXY_AUTHORIZATION), 0, 0, 1, 1}, + {&__pyx_n_s_PayloadEncodingError, __pyx_k_PayloadEncodingError, sizeof(__pyx_k_PayloadEncodingError), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_RANGE, __pyx_k_RANGE, sizeof(__pyx_k_RANGE), 0, 0, 1, 1}, + {&__pyx_n_s_REFERER, __pyx_k_REFERER, sizeof(__pyx_k_REFERER), 0, 0, 1, 1}, + {&__pyx_n_s_RETRY_AFTER, __pyx_k_RETRY_AFTER, sizeof(__pyx_k_RETRY_AFTER), 0, 0, 1, 1}, + {&__pyx_kp_u_RawRequestMessage, __pyx_k_RawRequestMessage, sizeof(__pyx_k_RawRequestMessage), 0, 1, 0, 0}, + {&__pyx_n_s_RawRequestMessage_2, __pyx_k_RawRequestMessage_2, sizeof(__pyx_k_RawRequestMessage_2), 0, 0, 1, 1}, + {&__pyx_n_u_RawRequestMessage_2, __pyx_k_RawRequestMessage_2, sizeof(__pyx_k_RawRequestMessage_2), 0, 1, 0, 1}, + {&__pyx_kp_u_RawResponseMessage, __pyx_k_RawResponseMessage, sizeof(__pyx_k_RawResponseMessage), 0, 1, 0, 0}, + {&__pyx_n_s_RawResponseMessage_2, __pyx_k_RawResponseMessage_2, sizeof(__pyx_k_RawResponseMessage_2), 0, 0, 1, 1}, + {&__pyx_n_u_RawResponseMessage_2, __pyx_k_RawResponseMessage_2, sizeof(__pyx_k_RawResponseMessage_2), 0, 1, 0, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_ACCEPT, __pyx_k_SEC_WEBSOCKET_ACCEPT, sizeof(__pyx_k_SEC_WEBSOCKET_ACCEPT), 0, 0, 1, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_EXTENSIONS, __pyx_k_SEC_WEBSOCKET_EXTENSIONS, sizeof(__pyx_k_SEC_WEBSOCKET_EXTENSIONS), 0, 0, 1, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_KEY, __pyx_k_SEC_WEBSOCKET_KEY, sizeof(__pyx_k_SEC_WEBSOCKET_KEY), 0, 0, 1, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_KEY1, __pyx_k_SEC_WEBSOCKET_KEY1, sizeof(__pyx_k_SEC_WEBSOCKET_KEY1), 0, 0, 1, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_PROTOCOL, __pyx_k_SEC_WEBSOCKET_PROTOCOL, sizeof(__pyx_k_SEC_WEBSOCKET_PROTOCOL), 0, 0, 
1, 1}, + {&__pyx_n_s_SEC_WEBSOCKET_VERSION, __pyx_k_SEC_WEBSOCKET_VERSION, sizeof(__pyx_k_SEC_WEBSOCKET_VERSION), 0, 0, 1, 1}, + {&__pyx_n_s_SERVER, __pyx_k_SERVER, sizeof(__pyx_k_SERVER), 0, 0, 1, 1}, + {&__pyx_n_s_SET_COOKIE, __pyx_k_SET_COOKIE, sizeof(__pyx_k_SET_COOKIE), 0, 0, 1, 1}, + {&__pyx_kp_u_Status_line_is_too_long, __pyx_k_Status_line_is_too_long, sizeof(__pyx_k_Status_line_is_too_long), 0, 1, 0, 0}, + {&__pyx_n_s_StreamReader, __pyx_k_StreamReader, sizeof(__pyx_k_StreamReader), 0, 0, 1, 1}, + {&__pyx_n_s_StreamReader_2, __pyx_k_StreamReader_2, sizeof(__pyx_k_StreamReader_2), 0, 0, 1, 1}, + {&__pyx_n_s_TE, __pyx_k_TE, sizeof(__pyx_k_TE), 0, 0, 1, 1}, + {&__pyx_n_s_TRAILER, __pyx_k_TRAILER, sizeof(__pyx_k_TRAILER), 0, 0, 1, 1}, + {&__pyx_n_s_TRANSFER_ENCODING, __pyx_k_TRANSFER_ENCODING, sizeof(__pyx_k_TRANSFER_ENCODING), 0, 0, 1, 1}, + {&__pyx_n_s_TransferEncodingError, __pyx_k_TransferEncodingError, sizeof(__pyx_k_TransferEncodingError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_UPGRADE, __pyx_k_UPGRADE, sizeof(__pyx_k_UPGRADE), 0, 0, 1, 1}, + {&__pyx_n_s_URI, __pyx_k_URI, sizeof(__pyx_k_URI), 0, 0, 1, 1}, + {&__pyx_n_s_URL, __pyx_k_URL, sizeof(__pyx_k_URL), 0, 0, 1, 1}, + {&__pyx_n_s_URL_2, __pyx_k_URL_2, sizeof(__pyx_k_URL_2), 0, 0, 1, 1}, + {&__pyx_n_s_USER_AGENT, __pyx_k_USER_AGENT, sizeof(__pyx_k_USER_AGENT), 0, 0, 1, 1}, + {&__pyx_n_s_VARY, __pyx_k_VARY, sizeof(__pyx_k_VARY), 0, 0, 1, 1}, + {&__pyx_n_s_VIA, __pyx_k_VIA, sizeof(__pyx_k_VIA), 0, 0, 1, 1}, + {&__pyx_n_s_WANT_DIGEST, __pyx_k_WANT_DIGEST, sizeof(__pyx_k_WANT_DIGEST), 0, 0, 1, 1}, + {&__pyx_n_s_WARNING, __pyx_k_WARNING, sizeof(__pyx_k_WARNING), 0, 0, 1, 1}, + {&__pyx_n_s_WEBSOCKET, __pyx_k_WEBSOCKET, sizeof(__pyx_k_WEBSOCKET), 0, 0, 1, 1}, + {&__pyx_n_s_WWW_AUTHENTICATE, __pyx_k_WWW_AUTHENTICATE, sizeof(__pyx_k_WWW_AUTHENTICATE), 0, 0, 1, 1}, + {&__pyx_n_s_X_FORWARDED_FOR, __pyx_k_X_FORWARDED_FOR, sizeof(__pyx_k_X_FORWARDED_FOR), 0, 0, 1, 1}, + {&__pyx_n_s_X_FORWARDED_HOST, __pyx_k_X_FORWARDED_HOST, sizeof(__pyx_k_X_FORWARDED_HOST), 0, 0, 1, 1}, + {&__pyx_n_s_X_FORWARDED_PROTO, __pyx_k_X_FORWARDED_PROTO, sizeof(__pyx_k_X_FORWARDED_PROTO), 0, 0, 1, 1}, + {&__pyx_kp_u__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 1, 0, 0}, + {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, + {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, + {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, + {&__pyx_kp_b__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 0, 0}, + {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, + {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, + {&__pyx_n_s_aiohttp, __pyx_k_aiohttp, sizeof(__pyx_k_aiohttp), 0, 0, 1, 1}, + {&__pyx_n_s_aiohttp__http_parser, __pyx_k_aiohttp__http_parser, sizeof(__pyx_k_aiohttp__http_parser), 0, 0, 1, 1}, + {&__pyx_kp_s_aiohttp__http_parser_pyx, __pyx_k_aiohttp__http_parser_pyx, sizeof(__pyx_k_aiohttp__http_parser_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1}, + {&__pyx_n_s_args, __pyx_k_args, sizeof(__pyx_k_args), 0, 0, 1, 1}, + {&__pyx_n_s_auto_decompress, __pyx_k_auto_decompress, sizeof(__pyx_k_auto_decompress), 0, 0, 1, 1}, + {&__pyx_n_s_begin_http_chunk_receiving, __pyx_k_begin_http_chunk_receiving, sizeof(__pyx_k_begin_http_chunk_receiving), 0, 0, 1, 1}, + {&__pyx_n_u_br, __pyx_k_br, sizeof(__pyx_k_br), 0, 1, 0, 1}, + {&__pyx_n_s_buf_data, __pyx_k_buf_data, sizeof(__pyx_k_buf_data), 0, 0, 1, 1}, + {&__pyx_n_s_build, 
__pyx_k_build, sizeof(__pyx_k_build), 0, 0, 1, 1}, + {&__pyx_n_s_chunked, __pyx_k_chunked, sizeof(__pyx_k_chunked), 0, 0, 1, 1}, + {&__pyx_n_u_chunked, __pyx_k_chunked, sizeof(__pyx_k_chunked), 0, 1, 0, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_close, __pyx_k_close, sizeof(__pyx_k_close), 0, 0, 1, 1}, + {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, + {&__pyx_n_u_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 1, 0, 1}, + {&__pyx_n_s_compression, __pyx_k_compression, sizeof(__pyx_k_compression), 0, 0, 1, 1}, + {&__pyx_n_u_compression, __pyx_k_compression, sizeof(__pyx_k_compression), 0, 1, 0, 1}, + {&__pyx_n_u_deflate, __pyx_k_deflate, sizeof(__pyx_k_deflate), 0, 1, 0, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_end_http_chunk_receiving, __pyx_k_end_http_chunk_receiving, sizeof(__pyx_k_end_http_chunk_receiving), 0, 0, 1, 1}, + {&__pyx_n_s_feed_data, __pyx_k_feed_data, sizeof(__pyx_k_feed_data), 0, 0, 1, 1}, + {&__pyx_n_s_feed_eof, __pyx_k_feed_eof, sizeof(__pyx_k_feed_eof), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1}, + {&__pyx_n_s_genexpr, __pyx_k_genexpr, sizeof(__pyx_k_genexpr), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_u_gzip, __pyx_k_gzip, sizeof(__pyx_k_gzip), 0, 1, 0, 1}, + {&__pyx_n_s_hdrs, __pyx_k_hdrs, sizeof(__pyx_k_hdrs), 0, 0, 1, 1}, + {&__pyx_n_s_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 0, 1, 1}, + {&__pyx_n_u_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 1, 0, 1}, + {&__pyx_n_s_host, __pyx_k_host, sizeof(__pyx_k_host), 0, 0, 1, 1}, + {&__pyx_n_s_http_exceptions, __pyx_k_http_exceptions, sizeof(__pyx_k_http_exceptions), 0, 0, 1, 1}, + {&__pyx_n_s_http_parser, __pyx_k_http_parser, sizeof(__pyx_k_http_parser), 0, 0, 1, 1}, + {&__pyx_n_s_http_writer, __pyx_k_http_writer, sizeof(__pyx_k_http_writer), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_kp_u_invalid_url_r, __pyx_k_invalid_url_r, sizeof(__pyx_k_invalid_url_r), 0, 1, 0, 0}, + {&__pyx_n_s_loop, __pyx_k_loop, sizeof(__pyx_k_loop), 0, 0, 1, 1}, + {&__pyx_n_s_lower, __pyx_k_lower, sizeof(__pyx_k_lower), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_max_field_size, __pyx_k_max_field_size, sizeof(__pyx_k_max_field_size), 0, 0, 1, 1}, + {&__pyx_n_s_max_headers, __pyx_k_max_headers, sizeof(__pyx_k_max_headers), 0, 0, 1, 1}, + {&__pyx_n_s_max_line_size, __pyx_k_max_line_size, sizeof(__pyx_k_max_line_size), 0, 0, 1, 1}, + {&__pyx_n_s_method, __pyx_k_method, sizeof(__pyx_k_method), 0, 0, 1, 1}, + {&__pyx_n_u_method, __pyx_k_method, sizeof(__pyx_k_method), 0, 1, 0, 1}, + {&__pyx_n_s_multidict, __pyx_k_multidict, sizeof(__pyx_k_multidict), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, + {&__pyx_n_s_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 0, 1, 1}, + {&__pyx_n_s_partition, __pyx_k_partition, sizeof(__pyx_k_partition), 0, 0, 1, 1}, + {&__pyx_n_s_password, 
hdrs.IF_MODIFIED_SINCE, + * hdrs.IF_NONE_MATCH, + * hdrs.IF_RANGE, # <<<<<<<<<<<<<< + * hdrs.IF_UNMODIFIED_SINCE, + * hdrs.KEEP_ALIVE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_42 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_IF_RANGE); if (unlikely(!__pyx_t_42)) __PYX_ERR(5, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_42); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":47 + * hdrs.IF_NONE_MATCH, + * hdrs.IF_RANGE, + * hdrs.IF_UNMODIFIED_SINCE, # <<<<<<<<<<<<<< + * hdrs.KEEP_ALIVE, + * hdrs.LAST_EVENT_ID, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_43 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_IF_UNMODIFIED_SINCE); if (unlikely(!__pyx_t_43)) __PYX_ERR(5, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_43); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":48 + * hdrs.IF_RANGE, + * hdrs.IF_UNMODIFIED_SINCE, + * hdrs.KEEP_ALIVE, # <<<<<<<<<<<<<< + * hdrs.LAST_EVENT_ID, + * hdrs.LAST_MODIFIED, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_44 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_KEEP_ALIVE); if (unlikely(!__pyx_t_44)) __PYX_ERR(5, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_44); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":49 + * hdrs.IF_UNMODIFIED_SINCE, + * hdrs.KEEP_ALIVE, + * hdrs.LAST_EVENT_ID, # <<<<<<<<<<<<<< + * hdrs.LAST_MODIFIED, + * hdrs.LINK, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_45 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_LAST_EVENT_ID); if (unlikely(!__pyx_t_45)) __PYX_ERR(5, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_45); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":50 + * hdrs.KEEP_ALIVE, + * hdrs.LAST_EVENT_ID, + * hdrs.LAST_MODIFIED, # <<<<<<<<<<<<<< + * hdrs.LINK, + * hdrs.LOCATION, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_46 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_LAST_MODIFIED); if (unlikely(!__pyx_t_46)) __PYX_ERR(5, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_46); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":51 + * hdrs.LAST_EVENT_ID, + * hdrs.LAST_MODIFIED, + * hdrs.LINK, # <<<<<<<<<<<<<< + * hdrs.LOCATION, + * hdrs.MAX_FORWARDS, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_47 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_LINK); if (unlikely(!__pyx_t_47)) __PYX_ERR(5, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_47); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":52 + * hdrs.LAST_MODIFIED, + * hdrs.LINK, + * hdrs.LOCATION, # <<<<<<<<<<<<<< + * hdrs.MAX_FORWARDS, + * hdrs.ORIGIN, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_48 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_LOCATION); if (unlikely(!__pyx_t_48)) __PYX_ERR(5, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_48); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 
= 0; + + /* "aiohttp/_headers.pxi":53 + * hdrs.LINK, + * hdrs.LOCATION, + * hdrs.MAX_FORWARDS, # <<<<<<<<<<<<<< + * hdrs.ORIGIN, + * hdrs.PRAGMA, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_49 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_MAX_FORWARDS); if (unlikely(!__pyx_t_49)) __PYX_ERR(5, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_49); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":54 + * hdrs.LOCATION, + * hdrs.MAX_FORWARDS, + * hdrs.ORIGIN, # <<<<<<<<<<<<<< + * hdrs.PRAGMA, + * hdrs.PROXY_AUTHENTICATE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 54, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_50 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_ORIGIN); if (unlikely(!__pyx_t_50)) __PYX_ERR(5, 54, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_50); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":55 + * hdrs.MAX_FORWARDS, + * hdrs.ORIGIN, + * hdrs.PRAGMA, # <<<<<<<<<<<<<< + * hdrs.PROXY_AUTHENTICATE, + * hdrs.PROXY_AUTHORIZATION, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_51 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_PRAGMA); if (unlikely(!__pyx_t_51)) __PYX_ERR(5, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_51); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":56 + * hdrs.ORIGIN, + * hdrs.PRAGMA, + * hdrs.PROXY_AUTHENTICATE, # <<<<<<<<<<<<<< + * hdrs.PROXY_AUTHORIZATION, + * hdrs.RANGE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_52 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_PROXY_AUTHENTICATE); if (unlikely(!__pyx_t_52)) __PYX_ERR(5, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_52); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":57 + * hdrs.PRAGMA, + * hdrs.PROXY_AUTHENTICATE, + * hdrs.PROXY_AUTHORIZATION, # <<<<<<<<<<<<<< + * hdrs.RANGE, + * hdrs.REFERER, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_53 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_PROXY_AUTHORIZATION); if (unlikely(!__pyx_t_53)) __PYX_ERR(5, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_53); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":58 + * hdrs.PROXY_AUTHENTICATE, + * hdrs.PROXY_AUTHORIZATION, + * hdrs.RANGE, # <<<<<<<<<<<<<< + * hdrs.REFERER, + * hdrs.RETRY_AFTER, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_54 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_RANGE); if (unlikely(!__pyx_t_54)) __PYX_ERR(5, 58, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_54); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":59 + * hdrs.PROXY_AUTHORIZATION, + * hdrs.RANGE, + * hdrs.REFERER, # <<<<<<<<<<<<<< + * hdrs.RETRY_AFTER, + * hdrs.SEC_WEBSOCKET_ACCEPT, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_55 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_REFERER); if (unlikely(!__pyx_t_55)) __PYX_ERR(5, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_55); + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":60 + * hdrs.RANGE, + * hdrs.REFERER, + * hdrs.RETRY_AFTER, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_ACCEPT, + * hdrs.SEC_WEBSOCKET_EXTENSIONS, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_56 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_RETRY_AFTER); if (unlikely(!__pyx_t_56)) __PYX_ERR(5, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_56); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":61 + * hdrs.REFERER, + * hdrs.RETRY_AFTER, + * hdrs.SEC_WEBSOCKET_ACCEPT, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_EXTENSIONS, + * hdrs.SEC_WEBSOCKET_KEY, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_57 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_ACCEPT); if (unlikely(!__pyx_t_57)) __PYX_ERR(5, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_57); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":62 + * hdrs.RETRY_AFTER, + * hdrs.SEC_WEBSOCKET_ACCEPT, + * hdrs.SEC_WEBSOCKET_EXTENSIONS, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_KEY, + * hdrs.SEC_WEBSOCKET_KEY1, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_58 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_EXTENSIONS); if (unlikely(!__pyx_t_58)) __PYX_ERR(5, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_58); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":63 + * hdrs.SEC_WEBSOCKET_ACCEPT, + * hdrs.SEC_WEBSOCKET_EXTENSIONS, + * hdrs.SEC_WEBSOCKET_KEY, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_KEY1, + * hdrs.SEC_WEBSOCKET_PROTOCOL, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_59 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_KEY); if (unlikely(!__pyx_t_59)) __PYX_ERR(5, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_59); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":64 + * hdrs.SEC_WEBSOCKET_EXTENSIONS, + * hdrs.SEC_WEBSOCKET_KEY, + * hdrs.SEC_WEBSOCKET_KEY1, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_PROTOCOL, + * hdrs.SEC_WEBSOCKET_VERSION, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 64, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_60 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_60)) __PYX_ERR(5, 64, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_60); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":65 + * hdrs.SEC_WEBSOCKET_KEY, + * hdrs.SEC_WEBSOCKET_KEY1, + * hdrs.SEC_WEBSOCKET_PROTOCOL, # <<<<<<<<<<<<<< + * hdrs.SEC_WEBSOCKET_VERSION, + * hdrs.SERVER, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_61 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_PROTOCOL); if (unlikely(!__pyx_t_61)) __PYX_ERR(5, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_61); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":66 + * hdrs.SEC_WEBSOCKET_KEY1, + * hdrs.SEC_WEBSOCKET_PROTOCOL, + * hdrs.SEC_WEBSOCKET_VERSION, # <<<<<<<<<<<<<< + * hdrs.SERVER, + * hdrs.SET_COOKIE, + */ + 
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_62 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_VERSION); if (unlikely(!__pyx_t_62)) __PYX_ERR(5, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_62); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":67 + * hdrs.SEC_WEBSOCKET_PROTOCOL, + * hdrs.SEC_WEBSOCKET_VERSION, + * hdrs.SERVER, # <<<<<<<<<<<<<< + * hdrs.SET_COOKIE, + * hdrs.TE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_63 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SERVER); if (unlikely(!__pyx_t_63)) __PYX_ERR(5, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_63); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":68 + * hdrs.SEC_WEBSOCKET_VERSION, + * hdrs.SERVER, + * hdrs.SET_COOKIE, # <<<<<<<<<<<<<< + * hdrs.TE, + * hdrs.TRAILER, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_64 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SET_COOKIE); if (unlikely(!__pyx_t_64)) __PYX_ERR(5, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_64); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":69 + * hdrs.SERVER, + * hdrs.SET_COOKIE, + * hdrs.TE, # <<<<<<<<<<<<<< + * hdrs.TRAILER, + * hdrs.TRANSFER_ENCODING, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_65 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_TE); if (unlikely(!__pyx_t_65)) __PYX_ERR(5, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_65); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":70 + * hdrs.SET_COOKIE, + * hdrs.TE, + * hdrs.TRAILER, # <<<<<<<<<<<<<< + * hdrs.TRANSFER_ENCODING, + * hdrs.UPGRADE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_66 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_TRAILER); if (unlikely(!__pyx_t_66)) __PYX_ERR(5, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_66); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":71 + * hdrs.TE, + * hdrs.TRAILER, + * hdrs.TRANSFER_ENCODING, # <<<<<<<<<<<<<< + * hdrs.UPGRADE, + * hdrs.URI, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 71, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_67 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_TRANSFER_ENCODING); if (unlikely(!__pyx_t_67)) __PYX_ERR(5, 71, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_67); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":72 + * hdrs.TRAILER, + * hdrs.TRANSFER_ENCODING, + * hdrs.UPGRADE, # <<<<<<<<<<<<<< + * hdrs.URI, + * hdrs.USER_AGENT, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_68 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_UPGRADE); if (unlikely(!__pyx_t_68)) __PYX_ERR(5, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_68); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":73 + * hdrs.TRANSFER_ENCODING, + * hdrs.UPGRADE, + * hdrs.URI, # <<<<<<<<<<<<<< + * hdrs.USER_AGENT, + * hdrs.VARY, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, 
__pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 73, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_69 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_URI); if (unlikely(!__pyx_t_69)) __PYX_ERR(5, 73, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_69); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":74 + * hdrs.UPGRADE, + * hdrs.URI, + * hdrs.USER_AGENT, # <<<<<<<<<<<<<< + * hdrs.VARY, + * hdrs.VIA, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_70 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_USER_AGENT); if (unlikely(!__pyx_t_70)) __PYX_ERR(5, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_70); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":75 + * hdrs.URI, + * hdrs.USER_AGENT, + * hdrs.VARY, # <<<<<<<<<<<<<< + * hdrs.VIA, + * hdrs.WANT_DIGEST, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 75, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_71 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_VARY); if (unlikely(!__pyx_t_71)) __PYX_ERR(5, 75, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_71); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":76 + * hdrs.USER_AGENT, + * hdrs.VARY, + * hdrs.VIA, # <<<<<<<<<<<<<< + * hdrs.WANT_DIGEST, + * hdrs.WARNING, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 76, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_72 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_VIA); if (unlikely(!__pyx_t_72)) __PYX_ERR(5, 76, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_72); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":77 + * hdrs.VARY, + * hdrs.VIA, + * hdrs.WANT_DIGEST, # <<<<<<<<<<<<<< + * hdrs.WARNING, + * hdrs.WEBSOCKET, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_73 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WANT_DIGEST); if (unlikely(!__pyx_t_73)) __PYX_ERR(5, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_73); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":78 + * hdrs.VIA, + * hdrs.WANT_DIGEST, + * hdrs.WARNING, # <<<<<<<<<<<<<< + * hdrs.WEBSOCKET, + * hdrs.WWW_AUTHENTICATE, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 78, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_74 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WARNING); if (unlikely(!__pyx_t_74)) __PYX_ERR(5, 78, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_74); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":79 + * hdrs.WANT_DIGEST, + * hdrs.WARNING, + * hdrs.WEBSOCKET, # <<<<<<<<<<<<<< + * hdrs.WWW_AUTHENTICATE, + * hdrs.X_FORWARDED_FOR, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 79, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_75 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WEBSOCKET); if (unlikely(!__pyx_t_75)) __PYX_ERR(5, 79, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_75); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":80 + * hdrs.WARNING, + * hdrs.WEBSOCKET, + * hdrs.WWW_AUTHENTICATE, # <<<<<<<<<<<<<< + * hdrs.X_FORWARDED_FOR, + * hdrs.X_FORWARDED_HOST, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 80, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __pyx_t_76 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WWW_AUTHENTICATE); if (unlikely(!__pyx_t_76)) __PYX_ERR(5, 80, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_76); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":81 + * hdrs.WEBSOCKET, + * hdrs.WWW_AUTHENTICATE, + * hdrs.X_FORWARDED_FOR, # <<<<<<<<<<<<<< + * hdrs.X_FORWARDED_HOST, + * hdrs.X_FORWARDED_PROTO, + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_77 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_FOR); if (unlikely(!__pyx_t_77)) __PYX_ERR(5, 81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_77); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":82 + * hdrs.WWW_AUTHENTICATE, + * hdrs.X_FORWARDED_FOR, + * hdrs.X_FORWARDED_HOST, # <<<<<<<<<<<<<< + * hdrs.X_FORWARDED_PROTO, + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_78 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_HOST); if (unlikely(!__pyx_t_78)) __PYX_ERR(5, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_78); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":83 + * hdrs.X_FORWARDED_FOR, + * hdrs.X_FORWARDED_HOST, + * hdrs.X_FORWARDED_PROTO, # <<<<<<<<<<<<<< + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 83, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_79 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_PROTO); if (unlikely(!__pyx_t_79)) __PYX_ERR(5, 83, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_headers.pxi":6 + * from . 
import hdrs + * cdef tuple headers = ( + * hdrs.ACCEPT, # <<<<<<<<<<<<<< + * hdrs.ACCEPT_CHARSET, + * hdrs.ACCEPT_ENCODING, + */ + __pyx_t_1 = PyTuple_New(78); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_1, 6, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_1, 7, __pyx_t_9); + __Pyx_GIVEREF(__pyx_t_10); + PyTuple_SET_ITEM(__pyx_t_1, 8, __pyx_t_10); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_1, 9, __pyx_t_11); + __Pyx_GIVEREF(__pyx_t_12); + PyTuple_SET_ITEM(__pyx_t_1, 10, __pyx_t_12); + __Pyx_GIVEREF(__pyx_t_13); + PyTuple_SET_ITEM(__pyx_t_1, 11, __pyx_t_13); + __Pyx_GIVEREF(__pyx_t_14); + PyTuple_SET_ITEM(__pyx_t_1, 12, __pyx_t_14); + __Pyx_GIVEREF(__pyx_t_15); + PyTuple_SET_ITEM(__pyx_t_1, 13, __pyx_t_15); + __Pyx_GIVEREF(__pyx_t_16); + PyTuple_SET_ITEM(__pyx_t_1, 14, __pyx_t_16); + __Pyx_GIVEREF(__pyx_t_17); + PyTuple_SET_ITEM(__pyx_t_1, 15, __pyx_t_17); + __Pyx_GIVEREF(__pyx_t_18); + PyTuple_SET_ITEM(__pyx_t_1, 16, __pyx_t_18); + __Pyx_GIVEREF(__pyx_t_19); + PyTuple_SET_ITEM(__pyx_t_1, 17, __pyx_t_19); + __Pyx_GIVEREF(__pyx_t_20); + PyTuple_SET_ITEM(__pyx_t_1, 18, __pyx_t_20); + __Pyx_GIVEREF(__pyx_t_21); + PyTuple_SET_ITEM(__pyx_t_1, 19, __pyx_t_21); + __Pyx_GIVEREF(__pyx_t_22); + PyTuple_SET_ITEM(__pyx_t_1, 20, __pyx_t_22); + __Pyx_GIVEREF(__pyx_t_23); + PyTuple_SET_ITEM(__pyx_t_1, 21, __pyx_t_23); + __Pyx_GIVEREF(__pyx_t_24); + PyTuple_SET_ITEM(__pyx_t_1, 22, __pyx_t_24); + __Pyx_GIVEREF(__pyx_t_25); + PyTuple_SET_ITEM(__pyx_t_1, 23, __pyx_t_25); + __Pyx_GIVEREF(__pyx_t_26); + PyTuple_SET_ITEM(__pyx_t_1, 24, __pyx_t_26); + __Pyx_GIVEREF(__pyx_t_27); + PyTuple_SET_ITEM(__pyx_t_1, 25, __pyx_t_27); + __Pyx_GIVEREF(__pyx_t_28); + PyTuple_SET_ITEM(__pyx_t_1, 26, __pyx_t_28); + __Pyx_GIVEREF(__pyx_t_29); + PyTuple_SET_ITEM(__pyx_t_1, 27, __pyx_t_29); + __Pyx_GIVEREF(__pyx_t_30); + PyTuple_SET_ITEM(__pyx_t_1, 28, __pyx_t_30); + __Pyx_GIVEREF(__pyx_t_31); + PyTuple_SET_ITEM(__pyx_t_1, 29, __pyx_t_31); + __Pyx_GIVEREF(__pyx_t_32); + PyTuple_SET_ITEM(__pyx_t_1, 30, __pyx_t_32); + __Pyx_GIVEREF(__pyx_t_33); + PyTuple_SET_ITEM(__pyx_t_1, 31, __pyx_t_33); + __Pyx_GIVEREF(__pyx_t_34); + PyTuple_SET_ITEM(__pyx_t_1, 32, __pyx_t_34); + __Pyx_GIVEREF(__pyx_t_35); + PyTuple_SET_ITEM(__pyx_t_1, 33, __pyx_t_35); + __Pyx_GIVEREF(__pyx_t_36); + PyTuple_SET_ITEM(__pyx_t_1, 34, __pyx_t_36); + __Pyx_GIVEREF(__pyx_t_37); + PyTuple_SET_ITEM(__pyx_t_1, 35, __pyx_t_37); + __Pyx_GIVEREF(__pyx_t_38); + PyTuple_SET_ITEM(__pyx_t_1, 36, __pyx_t_38); + __Pyx_GIVEREF(__pyx_t_39); + PyTuple_SET_ITEM(__pyx_t_1, 37, __pyx_t_39); + __Pyx_GIVEREF(__pyx_t_40); + PyTuple_SET_ITEM(__pyx_t_1, 38, __pyx_t_40); + __Pyx_GIVEREF(__pyx_t_41); + PyTuple_SET_ITEM(__pyx_t_1, 39, __pyx_t_41); + __Pyx_GIVEREF(__pyx_t_42); + PyTuple_SET_ITEM(__pyx_t_1, 40, __pyx_t_42); + __Pyx_GIVEREF(__pyx_t_43); + PyTuple_SET_ITEM(__pyx_t_1, 41, __pyx_t_43); + __Pyx_GIVEREF(__pyx_t_44); + PyTuple_SET_ITEM(__pyx_t_1, 42, __pyx_t_44); + __Pyx_GIVEREF(__pyx_t_45); + PyTuple_SET_ITEM(__pyx_t_1, 43, 
__pyx_t_45); + __Pyx_GIVEREF(__pyx_t_46); + PyTuple_SET_ITEM(__pyx_t_1, 44, __pyx_t_46); + __Pyx_GIVEREF(__pyx_t_47); + PyTuple_SET_ITEM(__pyx_t_1, 45, __pyx_t_47); + __Pyx_GIVEREF(__pyx_t_48); + PyTuple_SET_ITEM(__pyx_t_1, 46, __pyx_t_48); + __Pyx_GIVEREF(__pyx_t_49); + PyTuple_SET_ITEM(__pyx_t_1, 47, __pyx_t_49); + __Pyx_GIVEREF(__pyx_t_50); + PyTuple_SET_ITEM(__pyx_t_1, 48, __pyx_t_50); + __Pyx_GIVEREF(__pyx_t_51); + PyTuple_SET_ITEM(__pyx_t_1, 49, __pyx_t_51); + __Pyx_GIVEREF(__pyx_t_52); + PyTuple_SET_ITEM(__pyx_t_1, 50, __pyx_t_52); + __Pyx_GIVEREF(__pyx_t_53); + PyTuple_SET_ITEM(__pyx_t_1, 51, __pyx_t_53); + __Pyx_GIVEREF(__pyx_t_54); + PyTuple_SET_ITEM(__pyx_t_1, 52, __pyx_t_54); + __Pyx_GIVEREF(__pyx_t_55); + PyTuple_SET_ITEM(__pyx_t_1, 53, __pyx_t_55); + __Pyx_GIVEREF(__pyx_t_56); + PyTuple_SET_ITEM(__pyx_t_1, 54, __pyx_t_56); + __Pyx_GIVEREF(__pyx_t_57); + PyTuple_SET_ITEM(__pyx_t_1, 55, __pyx_t_57); + __Pyx_GIVEREF(__pyx_t_58); + PyTuple_SET_ITEM(__pyx_t_1, 56, __pyx_t_58); + __Pyx_GIVEREF(__pyx_t_59); + PyTuple_SET_ITEM(__pyx_t_1, 57, __pyx_t_59); + __Pyx_GIVEREF(__pyx_t_60); + PyTuple_SET_ITEM(__pyx_t_1, 58, __pyx_t_60); + __Pyx_GIVEREF(__pyx_t_61); + PyTuple_SET_ITEM(__pyx_t_1, 59, __pyx_t_61); + __Pyx_GIVEREF(__pyx_t_62); + PyTuple_SET_ITEM(__pyx_t_1, 60, __pyx_t_62); + __Pyx_GIVEREF(__pyx_t_63); + PyTuple_SET_ITEM(__pyx_t_1, 61, __pyx_t_63); + __Pyx_GIVEREF(__pyx_t_64); + PyTuple_SET_ITEM(__pyx_t_1, 62, __pyx_t_64); + __Pyx_GIVEREF(__pyx_t_65); + PyTuple_SET_ITEM(__pyx_t_1, 63, __pyx_t_65); + __Pyx_GIVEREF(__pyx_t_66); + PyTuple_SET_ITEM(__pyx_t_1, 64, __pyx_t_66); + __Pyx_GIVEREF(__pyx_t_67); + PyTuple_SET_ITEM(__pyx_t_1, 65, __pyx_t_67); + __Pyx_GIVEREF(__pyx_t_68); + PyTuple_SET_ITEM(__pyx_t_1, 66, __pyx_t_68); + __Pyx_GIVEREF(__pyx_t_69); + PyTuple_SET_ITEM(__pyx_t_1, 67, __pyx_t_69); + __Pyx_GIVEREF(__pyx_t_70); + PyTuple_SET_ITEM(__pyx_t_1, 68, __pyx_t_70); + __Pyx_GIVEREF(__pyx_t_71); + PyTuple_SET_ITEM(__pyx_t_1, 69, __pyx_t_71); + __Pyx_GIVEREF(__pyx_t_72); + PyTuple_SET_ITEM(__pyx_t_1, 70, __pyx_t_72); + __Pyx_GIVEREF(__pyx_t_73); + PyTuple_SET_ITEM(__pyx_t_1, 71, __pyx_t_73); + __Pyx_GIVEREF(__pyx_t_74); + PyTuple_SET_ITEM(__pyx_t_1, 72, __pyx_t_74); + __Pyx_GIVEREF(__pyx_t_75); + PyTuple_SET_ITEM(__pyx_t_1, 73, __pyx_t_75); + __Pyx_GIVEREF(__pyx_t_76); + PyTuple_SET_ITEM(__pyx_t_1, 74, __pyx_t_76); + __Pyx_GIVEREF(__pyx_t_77); + PyTuple_SET_ITEM(__pyx_t_1, 75, __pyx_t_77); + __Pyx_GIVEREF(__pyx_t_78); + PyTuple_SET_ITEM(__pyx_t_1, 76, __pyx_t_78); + __Pyx_GIVEREF(__pyx_t_79); + PyTuple_SET_ITEM(__pyx_t_1, 77, __pyx_t_79); + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_6 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_9 = 0; + __pyx_t_10 = 0; + __pyx_t_11 = 0; + __pyx_t_12 = 0; + __pyx_t_13 = 0; + __pyx_t_14 = 0; + __pyx_t_15 = 0; + __pyx_t_16 = 0; + __pyx_t_17 = 0; + __pyx_t_18 = 0; + __pyx_t_19 = 0; + __pyx_t_20 = 0; + __pyx_t_21 = 0; + __pyx_t_22 = 0; + __pyx_t_23 = 0; + __pyx_t_24 = 0; + __pyx_t_25 = 0; + __pyx_t_26 = 0; + __pyx_t_27 = 0; + __pyx_t_28 = 0; + __pyx_t_29 = 0; + __pyx_t_30 = 0; + __pyx_t_31 = 0; + __pyx_t_32 = 0; + __pyx_t_33 = 0; + __pyx_t_34 = 0; + __pyx_t_35 = 0; + __pyx_t_36 = 0; + __pyx_t_37 = 0; + __pyx_t_38 = 0; + __pyx_t_39 = 0; + __pyx_t_40 = 0; + __pyx_t_41 = 0; + __pyx_t_42 = 0; + __pyx_t_43 = 0; + __pyx_t_44 = 0; + __pyx_t_45 = 0; + __pyx_t_46 = 0; + __pyx_t_47 = 0; + __pyx_t_48 = 0; + __pyx_t_49 = 0; + __pyx_t_50 = 0; + __pyx_t_51 = 0; + __pyx_t_52 = 0; + __pyx_t_53 = 0; + __pyx_t_54 = 0; + 
__pyx_t_55 = 0; + __pyx_t_56 = 0; + __pyx_t_57 = 0; + __pyx_t_58 = 0; + __pyx_t_59 = 0; + __pyx_t_60 = 0; + __pyx_t_61 = 0; + __pyx_t_62 = 0; + __pyx_t_63 = 0; + __pyx_t_64 = 0; + __pyx_t_65 = 0; + __pyx_t_66 = 0; + __pyx_t_67 = 0; + __pyx_t_68 = 0; + __pyx_t_69 = 0; + __pyx_t_70 = 0; + __pyx_t_71 = 0; + __pyx_t_72 = 0; + __pyx_t_73 = 0; + __pyx_t_74 = 0; + __pyx_t_75 = 0; + __pyx_t_76 = 0; + __pyx_t_77 = 0; + __pyx_t_78 = 0; + __pyx_t_79 = 0; + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_headers); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_headers, ((PyObject*)__pyx_t_1)); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":40 + * char* PyByteArray_AsString(object) + * + * __all__ = ('HttpRequestParser', 'HttpResponseParser', # <<<<<<<<<<<<<< + * 'RawRequestMessage', 'RawResponseMessage') + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__12) < 0) __PYX_ERR(0, 40, __pyx_L1_error) + + /* "aiohttp/_http_parser.pyx":43 + * 'RawRequestMessage', 'RawResponseMessage') + * + * cdef object URL = _URL # <<<<<<<<<<<<<< + * cdef object URL_build = URL.build + * cdef object CIMultiDict = _CIMultiDict + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_URL_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 43, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":44 + * + * cdef object URL = _URL + * cdef object URL_build = URL.build # <<<<<<<<<<<<<< + * cdef object CIMultiDict = _CIMultiDict + * cdef object CIMultiDictProxy = _CIMultiDictProxy + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL_build); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":45 + * cdef object URL = _URL + * cdef object URL_build = URL.build + * cdef object CIMultiDict = _CIMultiDict # <<<<<<<<<<<<<< + * cdef object CIMultiDictProxy = _CIMultiDictProxy + * cdef object HttpVersion = _HttpVersion + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_CIMultiDict_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDict); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDict, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":46 + * cdef object URL_build = URL.build + * cdef object CIMultiDict = _CIMultiDict + * cdef object CIMultiDictProxy = _CIMultiDictProxy # <<<<<<<<<<<<<< + * cdef object HttpVersion = _HttpVersion + * cdef object HttpVersion10 = _HttpVersion10 + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_CIMultiDictProxy_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":47 + * cdef object CIMultiDict = _CIMultiDict + * cdef object CIMultiDictProxy = _CIMultiDictProxy + * cdef object HttpVersion = _HttpVersion # <<<<<<<<<<<<<< + * cdef object HttpVersion10 = _HttpVersion10 + * cdef object 
HttpVersion11 = _HttpVersion11 + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":48 + * cdef object CIMultiDictProxy = _CIMultiDictProxy + * cdef object HttpVersion = _HttpVersion + * cdef object HttpVersion10 = _HttpVersion10 # <<<<<<<<<<<<<< + * cdef object HttpVersion11 = _HttpVersion11 + * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion10_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion10); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion10, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":49 + * cdef object HttpVersion = _HttpVersion + * cdef object HttpVersion10 = _HttpVersion10 + * cdef object HttpVersion11 = _HttpVersion11 # <<<<<<<<<<<<<< + * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 + * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion11_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion11); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion11, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":50 + * cdef object HttpVersion10 = _HttpVersion10 + * cdef object HttpVersion11 = _HttpVersion11 + * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 # <<<<<<<<<<<<<< + * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING + * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_79 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_t_79); + __Pyx_GIVEREF(__pyx_t_79); + __pyx_t_79 = 0; + + /* "aiohttp/_http_parser.pyx":51 + * cdef object HttpVersion11 = _HttpVersion11 + * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 + * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING # <<<<<<<<<<<<<< + * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD + * cdef object StreamReader = _StreamReader + */ + __Pyx_GetModuleGlobalName(__pyx_t_79, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_79)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_79); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_79, __pyx_n_s_CONTENT_ENCODING); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_79); __pyx_t_79 = 0; + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":52 + * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 + * cdef object CONTENT_ENCODING = 
hdrs.CONTENT_ENCODING + * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD # <<<<<<<<<<<<<< + * cdef object StreamReader = _StreamReader + * cdef object DeflateBuffer = _DeflateBuffer + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_EMPTY_PAYLOAD_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":53 + * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING + * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD + * cdef object StreamReader = _StreamReader # <<<<<<<<<<<<<< + * cdef object DeflateBuffer = _DeflateBuffer + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamReader_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_StreamReader); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":54 + * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD + * cdef object StreamReader = _StreamReader + * cdef object DeflateBuffer = _DeflateBuffer # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DeflateBuffer_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 54, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":68 + * DEF METHODS_COUNT = 34; + * + * cdef list _http_method = [] # <<<<<<<<<<<<<< + * + * for i in range(METHODS_COUNT): + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser__http_method); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser__http_method, ((PyObject*)__pyx_t_1)); + __Pyx_GIVEREF(__pyx_t_1); + __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":70 + * cdef list _http_method = [] + * + * for i in range(METHODS_COUNT): # <<<<<<<<<<<<<< + * _http_method.append( + * cparser.http_method_str( i).decode('ascii')) + */ + for (__pyx_t_80 = 0; __pyx_t_80 < 34; __pyx_t_80+=1) { + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_t_80); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_1) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":71 + * + * for i in range(METHODS_COUNT): + * _http_method.append( # <<<<<<<<<<<<<< + * cparser.http_method_str( i).decode('ascii')) + * + */ + if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 71, __pyx_L1_error) + } + + /* "aiohttp/_http_parser.pyx":72 + * for i in range(METHODS_COUNT): + * _http_method.append( + * cparser.http_method_str( i).decode('ascii')) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_i); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_81 = ((enum http_method)__Pyx_PyInt_As_enum__http_method(__pyx_t_1)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 
0; + __pyx_t_82 = http_method_str(((enum http_method)__pyx_t_81)); + __pyx_t_1 = __Pyx_decode_c_string(__pyx_t_82, 0, strlen(__pyx_t_82), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "aiohttp/_http_parser.pyx":71 + * + * for i in range(METHODS_COUNT): + * _http_method.append( # <<<<<<<<<<<<<< + * cparser.http_method_str( i).decode('ascii')) + * + */ + __pyx_t_83 = __Pyx_PyList_Append(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_t_1); if (unlikely(__pyx_t_83 == ((int)-1))) __PYX_ERR(0, 71, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + + /* "aiohttp/_http_parser.pyx":755 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_1parse_url, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 755, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_parse_url, __pyx_t_1) < 0) __PYX_ERR(0, 755, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawRequestMessage, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] + * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawResponseMessag, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_parser.pyx":1 + * #cython: language_level=3 # <<<<<<<<<<<<<< + * # + * # Based on https://github.com/MagicStack/httptools + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + 
__Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_XDECREF(__pyx_t_18); + __Pyx_XDECREF(__pyx_t_19); + __Pyx_XDECREF(__pyx_t_20); + __Pyx_XDECREF(__pyx_t_21); + __Pyx_XDECREF(__pyx_t_22); + __Pyx_XDECREF(__pyx_t_23); + __Pyx_XDECREF(__pyx_t_24); + __Pyx_XDECREF(__pyx_t_25); + __Pyx_XDECREF(__pyx_t_26); + __Pyx_XDECREF(__pyx_t_27); + __Pyx_XDECREF(__pyx_t_28); + __Pyx_XDECREF(__pyx_t_29); + __Pyx_XDECREF(__pyx_t_30); + __Pyx_XDECREF(__pyx_t_31); + __Pyx_XDECREF(__pyx_t_32); + __Pyx_XDECREF(__pyx_t_33); + __Pyx_XDECREF(__pyx_t_34); + __Pyx_XDECREF(__pyx_t_35); + __Pyx_XDECREF(__pyx_t_36); + __Pyx_XDECREF(__pyx_t_37); + __Pyx_XDECREF(__pyx_t_38); + __Pyx_XDECREF(__pyx_t_39); + __Pyx_XDECREF(__pyx_t_40); + __Pyx_XDECREF(__pyx_t_41); + __Pyx_XDECREF(__pyx_t_42); + __Pyx_XDECREF(__pyx_t_43); + __Pyx_XDECREF(__pyx_t_44); + __Pyx_XDECREF(__pyx_t_45); + __Pyx_XDECREF(__pyx_t_46); + __Pyx_XDECREF(__pyx_t_47); + __Pyx_XDECREF(__pyx_t_48); + __Pyx_XDECREF(__pyx_t_49); + __Pyx_XDECREF(__pyx_t_50); + __Pyx_XDECREF(__pyx_t_51); + __Pyx_XDECREF(__pyx_t_52); + __Pyx_XDECREF(__pyx_t_53); + __Pyx_XDECREF(__pyx_t_54); + __Pyx_XDECREF(__pyx_t_55); + __Pyx_XDECREF(__pyx_t_56); + __Pyx_XDECREF(__pyx_t_57); + __Pyx_XDECREF(__pyx_t_58); + __Pyx_XDECREF(__pyx_t_59); + __Pyx_XDECREF(__pyx_t_60); + __Pyx_XDECREF(__pyx_t_61); + __Pyx_XDECREF(__pyx_t_62); + __Pyx_XDECREF(__pyx_t_63); + __Pyx_XDECREF(__pyx_t_64); + __Pyx_XDECREF(__pyx_t_65); + __Pyx_XDECREF(__pyx_t_66); + __Pyx_XDECREF(__pyx_t_67); + __Pyx_XDECREF(__pyx_t_68); + __Pyx_XDECREF(__pyx_t_69); + __Pyx_XDECREF(__pyx_t_70); + __Pyx_XDECREF(__pyx_t_71); + __Pyx_XDECREF(__pyx_t_72); + __Pyx_XDECREF(__pyx_t_73); + __Pyx_XDECREF(__pyx_t_74); + __Pyx_XDECREF(__pyx_t_75); + __Pyx_XDECREF(__pyx_t_76); + __Pyx_XDECREF(__pyx_t_77); + __Pyx_XDECREF(__pyx_t_78); + __Pyx_XDECREF(__pyx_t_79); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._http_parser", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._http_parser"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* decode_c_bytes */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( + const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + if (unlikely((start < 0) | (stop < 0))) { + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + if (stop < 0) + stop += length; + } + if (stop > length) + stop = length; + length = stop - start; + if (unlikely(length <= 0)) + return PyUnicode_FromUnicode(NULL, 0); + cstring += start; + if (decode_func) { + return decode_func(cstring, length, errors); + } else { + return PyUnicode_Decode(cstring, length, encoding, errors); + } +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* None */ +static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname) { + PyErr_Format(PyExc_NameError, "free variable '%s' referenced before assignment in enclosing scope", varname); +} + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* IterFinish */ +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", 
+ function_name, key); + #endif + return 0; +} + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ?
r : __Pyx_GetAttr3Default(d); +} + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, 
__pyx_empty_tuple, NULL); +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 
must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int 
__Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* SliceObject */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, + Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, + int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { +#if CYTHON_USE_TYPE_SLOTS + PyMappingMethods* mp; +#if PY_MAJOR_VERSION < 3 + PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; + if (likely(ms && ms->sq_slice)) { + if (!has_cstart) { + if (_py_start && (*_py_start != Py_None)) { + cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); + if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstart = 0; + } + if (!has_cstop) { + if (_py_stop && (*_py_stop != Py_None)) { + cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); + if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstop = PY_SSIZE_T_MAX; + } + if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { + Py_ssize_t l = ms->sq_length(obj); + if (likely(l >= 0)) { + if (cstop < 0) { + cstop += l; + if (cstop < 0) cstop = 0; + } + if (cstart < 0) { + cstart += l; + if (cstart < 0) cstart = 0; + } + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + goto bad; + PyErr_Clear(); + } + } + return ms->sq_slice(obj, cstart, cstop); + } +#endif + mp = Py_TYPE(obj)->tp_as_mapping; + if (likely(mp && mp->mp_subscript)) +#endif + { + PyObject* result; + PyObject *py_slice, *py_start, *py_stop; + if (_py_slice) { + py_slice = *_py_slice; + } else { + PyObject* owned_start = NULL; + PyObject* owned_stop = NULL; + if (_py_start) { + py_start = *_py_start; + } else { + if (has_cstart) { + owned_start = py_start = PyInt_FromSsize_t(cstart); + if (unlikely(!py_start)) goto bad; + } else + py_start = Py_None; + } + if (_py_stop) { + py_stop = *_py_stop; + } else { + if (has_cstop) { + owned_stop = py_stop = PyInt_FromSsize_t(cstop); + if (unlikely(!py_stop)) { + Py_XDECREF(owned_start); + goto bad; + } + } else + py_stop = Py_None; + } + py_slice = PySlice_New(py_start, py_stop, Py_None); + Py_XDECREF(owned_start); + Py_XDECREF(owned_stop); + if (unlikely(!py_slice)) goto bad; + } +#if CYTHON_USE_TYPE_SLOTS + result = mp->mp_subscript(obj, py_slice); +#else + result = PyObject_GetItem(obj, py_slice); +#endif + if (!_py_slice) { + Py_DECREF(py_slice); + } + return result; + } + PyErr_Format(PyExc_TypeError, + "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); +bad: + return NULL; +} + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb 
= tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void 
__Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* decode_c_string */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_string( + const char* cstring, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + Py_ssize_t length; + if (unlikely((start < 0) | (stop < 0))) { + size_t slen = strlen(cstring); + if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, + "c-string too long to convert to Python"); + return NULL; + } + length = (Py_ssize_t) slen; + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + if (stop < 0) + stop += length; + } + length = stop - start; + if (unlikely(length <= 0)) + return PyUnicode_FromUnicode(NULL, 0); + cstring += start; + if (decode_func) { + return decode_func(cstring, length, errors); + } else { + return PyUnicode_Decode(cstring, length, encoding, errors); + } +} + +/* UnpackUnboundCMethod */ +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } +#endif + return 0; +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) { + if (PY_VERSION_HEX >= 0x030700A0) { + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + } else { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + 
PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if 
(unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto 
BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto GOOD; +BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* SetVTable */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if 
(__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { + const unsigned int neg_one = (unsigned int) ((unsigned int) 0 - (unsigned int) 1), const_zero = (unsigned int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(unsigned int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(unsigned int), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value) { + const unsigned short neg_one = (unsigned short) ((unsigned short) 0 - (unsigned short) 1), const_zero = (unsigned short) 0; + const int is_unsigned = neg_one > const_zero; + if 
(is_unsigned) { + if (sizeof(unsigned short) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned short) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned short) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned short) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned short) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(unsigned short), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) { + const uint16_t neg_one = (uint16_t) ((uint16_t) 0 - (uint16_t) 1), const_zero = (uint16_t) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(uint16_t) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(uint16_t) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(uint16_t) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(uint16_t), + little, !is_unsigned); + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = 
((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << 
PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE enum http_method __Pyx_PyInt_As_enum__http_method(PyObject *x) { + const enum http_method neg_one = (enum http_method) ((enum http_method) 0 - (enum http_method) 1), const_zero = (enum http_method) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(enum http_method) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(enum http_method, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if 
(is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (enum http_method) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (enum http_method) 0; + case 1: __PYX_VERIFY_RETURN_INT(enum http_method, digit, digits[0]) + case 2: + if (8 * sizeof(enum http_method) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) >= 2 * PyLong_SHIFT) { + return (enum http_method) (((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(enum http_method) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) >= 3 * PyLong_SHIFT) { + return (enum http_method) (((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(enum http_method) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) >= 4 * PyLong_SHIFT) { + return (enum http_method) (((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (enum http_method) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(enum http_method) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(enum http_method, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum http_method) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(enum http_method, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (enum http_method) 0; + case -1: __PYX_VERIFY_RETURN_INT(enum http_method, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(enum http_method, digit, +digits[0]) + case -2: + if (8 * sizeof(enum http_method) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { + return (enum http_method) (((enum http_method)-1)*(((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(enum 
http_method) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { + return (enum http_method) ((((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { + return (enum http_method) (((enum http_method)-1)*(((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(enum http_method) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { + return (enum http_method) ((((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 4 * PyLong_SHIFT) { + return (enum http_method) (((enum http_method)-1)*(((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(enum http_method) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(enum http_method) - 1 > 4 * PyLong_SHIFT) { + return (enum http_method) ((((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); + } + } + break; + } +#endif + if (sizeof(enum http_method) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(enum http_method, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum http_method) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(enum http_method, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + enum http_method val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = 
PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (enum http_method) -1; + } + } else { + enum http_method val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (enum http_method) -1; + val = __Pyx_PyInt_As_enum__http_method(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to enum http_method"); + return (enum http_method) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to enum http_method"); + return (enum http_method) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) { + const size_t neg_one = (size_t) ((size_t) 0 - (size_t) 1), const_zero = (size_t) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(size_t) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (size_t) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (size_t) 0; + case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0]) + case 2: + if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 2 * PyLong_SHIFT) { + return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) { + return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) { + return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (size_t) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(size_t) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else 
if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (size_t) 0; + case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, +digits[0]) + case -2: + if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { + return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + } +#endif + if (sizeof(size_t) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if 
(sizeof(size_t) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + size_t val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (size_t) -1; + } + } else { + size_t val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (size_t) -1; + val = __Pyx_PyInt_As_size_t(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to size_t"); + return (size_t) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int 
result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | 
(long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; itp_name); + if (cached_type) { + if (!PyType_Check((PyObject*)cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", + type->tp_name); + goto bad; + } + if (cached_type->tp_basicsize != type->tp_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + type->tp_name); + goto bad; + } + } else { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; + } +done: + Py_DECREF(fake_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (descr != NULL) { + *method = descr; + return 0; + } + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(name)); +#endif + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + 
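+/* Fast path of __Pyx_PyObject_GetMethod: when the looked-up attribute is a bound method whose self is obj, borrow the underlying function and return 1, so __Pyx_PyObject_CallMethod1 can invoke it via __Pyx_PyObject_Call2Args(method, obj, arg) without allocating a bound-method object. */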
Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod1 */ +static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { + PyObject *result = __Pyx_PyObject_CallOneArg(method, arg); + Py_DECREF(method); + return result; +} +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { + PyObject *method = NULL, *result; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_Call2Args(method, obj, arg); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) return NULL; + return __Pyx__PyObject_CallMethod1(method, arg); +} + +/* CoroutineBase */ +#include +#include +#define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom) +static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject **pvalue) { + PyObject *et, *ev, *tb; + PyObject *value = NULL; + __Pyx_ErrFetch(&et, &ev, &tb); + if (!et) { + Py_XDECREF(tb); + Py_XDECREF(ev); + Py_INCREF(Py_None); + *pvalue = Py_None; + return 0; + } + if (likely(et == PyExc_StopIteration)) { + if (!ev) { + Py_INCREF(Py_None); + value = Py_None; + } +#if PY_VERSION_HEX >= 0x030300A0 + else if (Py_TYPE(ev) == (PyTypeObject*)PyExc_StopIteration) { + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); + } +#endif + else if (unlikely(PyTuple_Check(ev))) { + if (PyTuple_GET_SIZE(ev) >= 1) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + value = PyTuple_GET_ITEM(ev, 0); + Py_INCREF(value); +#else + value = PySequence_ITEM(ev, 0); +#endif + } else { + Py_INCREF(Py_None); + value = Py_None; + } + Py_DECREF(ev); + } + else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) { + value = ev; + } + if (likely(value)) { + Py_XDECREF(tb); + Py_DECREF(et); + *pvalue = value; + return 0; + } + } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) { + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + PyErr_NormalizeException(&et, &ev, &tb); + if (unlikely(!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration))) { + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + Py_XDECREF(tb); + Py_DECREF(et); +#if PY_VERSION_HEX >= 0x030300A0 + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); +#else + { + PyObject* args = __Pyx_PyObject_GetAttrStr(ev, __pyx_n_s_args); + Py_DECREF(ev); + if (likely(args)) { + value = PySequence_GetItem(args, 0); + Py_DECREF(args); + } + if (unlikely(!value)) { + __Pyx_ErrRestore(NULL, NULL, NULL); + Py_INCREF(Py_None); + value = Py_None; + } + } +#endif + *pvalue = value; + return 0; +} +static CYTHON_INLINE +void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *exc_state) { + PyObject *t, *v, *tb; + t = exc_state->exc_type; + v = exc_state->exc_value; + tb = exc_state->exc_traceback; + exc_state->exc_type = NULL; + exc_state->exc_value = NULL; + exc_state->exc_traceback = NULL; + Py_XDECREF(t); + Py_XDECREF(v); + Py_XDECREF(tb); +} +#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) { + const char *msg; + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check((PyObject*)gen)) { + msg = "coroutine already executing"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if 
(__Pyx_AsyncGen_CheckExact((PyObject*)gen)) { + msg = "async generator already executing"; + #endif + } else { + msg = "generator already executing"; + } + PyErr_SetString(PyExc_ValueError, msg); +} +#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) { + const char *msg; + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(gen)) { + msg = "can't send non-None value to a just-started coroutine"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(gen)) { + msg = "can't send non-None value to a just-started async generator"; + #endif + } else { + msg = "can't send non-None value to a just-started generator"; + } + PyErr_SetString(PyExc_TypeError, msg); +} +#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) { + #ifdef __Pyx_Coroutine_USED + if (!closing && __Pyx_Coroutine_Check(gen)) { + PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine"); + } else + #endif + if (value) { + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); + else + #endif + PyErr_SetNone(PyExc_StopIteration); + } +} +static +PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) { + __Pyx_PyThreadState_declare + PyThreadState *tstate; + __Pyx_ExcInfoStruct *exc_state; + PyObject *retval; + assert(!self->is_running); + if (unlikely(self->resume_label == 0)) { + if (unlikely(value && value != Py_None)) { + return __Pyx_Coroutine_NotStartedError((PyObject*)self); + } + } + if (unlikely(self->resume_label == -1)) { + return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing); + } +#if CYTHON_FAST_THREAD_STATE + __Pyx_PyThreadState_assign + tstate = __pyx_tstate; +#else + tstate = __Pyx_PyThreadState_Current; +#endif + exc_state = &self->gi_exc_state; + if (exc_state->exc_type) { + #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON + #else + if (exc_state->exc_traceback) { + PyTracebackObject *tb = (PyTracebackObject *) exc_state->exc_traceback; + PyFrameObject *f = tb->tb_frame; + Py_XINCREF(tstate->frame); + assert(f->f_back == NULL); + f->f_back = tstate->frame; + } + #endif + } +#if CYTHON_USE_EXC_INFO_STACK + exc_state->previous_item = tstate->exc_info; + tstate->exc_info = exc_state; +#else + if (exc_state->exc_type) { + __Pyx_ExceptionSwap(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); + } else { + __Pyx_Coroutine_ExceptionClear(exc_state); + __Pyx_ExceptionSave(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); + } +#endif + self->is_running = 1; + retval = self->body((PyObject *) self, tstate, value); + self->is_running = 0; +#if CYTHON_USE_EXC_INFO_STACK + exc_state = &self->gi_exc_state; + tstate->exc_info = exc_state->previous_item; + exc_state->previous_item = NULL; + __Pyx_Coroutine_ResetFrameBackpointer(exc_state); +#endif + return retval; +} +static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state) { + PyObject *exc_tb = exc_state->exc_traceback; + if (likely(exc_tb)) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON +#else + PyTracebackObject *tb = (PyTracebackObject *) 
exc_tb; + PyFrameObject *f = tb->tb_frame; + Py_CLEAR(f->f_back); +#endif + } +} +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) { + if (unlikely(!retval)) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (!__Pyx_PyErr_Occurred()) { + PyObject *exc = PyExc_StopIteration; + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + exc = __Pyx_PyExc_StopAsyncIteration; + #endif + __Pyx_PyErr_SetNone(exc); + } + } + return retval; +} +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { + PyObject *ret; + PyObject *val = NULL; + __Pyx_Coroutine_Undelegate(gen); + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val); + ret = __Pyx_Coroutine_SendEx(gen, val, 0); + Py_XDECREF(val); + return ret; +} +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { + PyObject *retval; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + ret = __Pyx_async_gen_asend_send(yf, value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyCoro_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? 
NULL : value); + } else + #endif + { + if (value == Py_None) + ret = Py_TYPE(yf)->tp_iternext(yf); + else + ret = __Pyx_PyObject_CallMethod1(yf, __pyx_n_s_send, value); + } + gen->is_running = 0; + if (likely(ret)) { + return ret; + } + retval = __Pyx_Coroutine_FinishDelegation(gen); + } else { + retval = __Pyx_Coroutine_SendEx(gen, value, 0); + } + return __Pyx_Coroutine_MethodReturn(self, retval); +} +static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) { + PyObject *retval = NULL; + int err = 0; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + if (__Pyx_CoroutineAwait_CheckExact(yf)) { + retval = __Pyx_CoroutineAwait_Close((__pyx_CoroutineAwaitObject*)yf, NULL); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + retval = __Pyx_async_gen_asend_close(yf, NULL); + } else + if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) { + retval = __Pyx_async_gen_athrow_close(yf, NULL); + } else + #endif + { + PyObject *meth; + gen->is_running = 1; + meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_close); + if (unlikely(!meth)) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_WriteUnraisable(yf); + } + PyErr_Clear(); + } else { + retval = PyObject_CallFunction(meth, NULL); + Py_DECREF(meth); + if (!retval) + err = -1; + } + gen->is_running = 0; + } + Py_XDECREF(retval); + return err; +} +static PyObject *__Pyx_Generator_Next(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Generator_Next(yf); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, NULL); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, Py_None); + } else + #endif + ret = Py_TYPE(yf)->tp_iternext(yf); + gen->is_running = 0; + if (likely(ret)) { + return ret; + } + return __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_SendEx(gen, Py_None, 0); +} +static PyObject *__Pyx_Coroutine_Close_Method(PyObject *self, CYTHON_UNUSED PyObject *arg) { + return __Pyx_Coroutine_Close(self); +} +static PyObject *__Pyx_Coroutine_Close(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *retval, *raised_exception; + PyObject *yf = gen->yieldfrom; + int err = 0; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + Py_INCREF(yf); + err = __Pyx_Coroutine_CloseIter(gen, yf); + __Pyx_Coroutine_Undelegate(gen); + Py_DECREF(yf); + } + if (err == 0) + PyErr_SetNone(PyExc_GeneratorExit); + retval = __Pyx_Coroutine_SendEx(gen, NULL, 1); + if (unlikely(retval)) { + const char *msg; + Py_DECREF(retval); + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(self)) { + msg = "coroutine ignored GeneratorExit"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(self)) { +#if 
PY_VERSION_HEX < 0x03060000 + msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)"; +#else + msg = "async generator ignored GeneratorExit"; +#endif + #endif + } else { + msg = "generator ignored GeneratorExit"; + } + PyErr_SetString(PyExc_RuntimeError, msg); + return NULL; + } + raised_exception = PyErr_Occurred(); + if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) { + if (raised_exception) PyErr_Clear(); + Py_INCREF(Py_None); + return Py_None; + } + return NULL; +} +static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb, + PyObject *args, int close_on_genexit) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + Py_INCREF(yf); + if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) { + int err = __Pyx_Coroutine_CloseIter(gen, yf); + Py_DECREF(yf); + __Pyx_Coroutine_Undelegate(gen); + if (err < 0) + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); + goto throw_here; + } + gen->is_running = 1; + if (0 + #ifdef __Pyx_Generator_USED + || __Pyx_Generator_CheckExact(yf) + #endif + #ifdef __Pyx_Coroutine_USED + || __Pyx_Coroutine_Check(yf) + #endif + ) { + ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit); + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_CoroutineAwait_CheckExact(yf)) { + ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit); + #endif + } else { + PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_throw); + if (unlikely(!meth)) { + Py_DECREF(yf); + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + gen->is_running = 0; + return NULL; + } + PyErr_Clear(); + __Pyx_Coroutine_Undelegate(gen); + gen->is_running = 0; + goto throw_here; + } + if (likely(args)) { + ret = PyObject_CallObject(meth, args); + } else { + ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); + } + Py_DECREF(meth); + } + gen->is_running = 0; + Py_DECREF(yf); + if (!ret) { + ret = __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_MethodReturn(self, ret); + } +throw_here: + __Pyx_Raise(typ, val, tb, NULL); + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); +} +static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) { + PyObject *typ; + PyObject *val = NULL; + PyObject *tb = NULL; + if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb)) + return NULL; + return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1); +} +static CYTHON_INLINE int __Pyx_Coroutine_traverse_excstate(__Pyx_ExcInfoStruct *exc_state, visitproc visit, void *arg) { + Py_VISIT(exc_state->exc_type); + Py_VISIT(exc_state->exc_value); + Py_VISIT(exc_state->exc_traceback); + return 0; +} +static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) { + Py_VISIT(gen->closure); + Py_VISIT(gen->classobj); + Py_VISIT(gen->yieldfrom); + return __Pyx_Coroutine_traverse_excstate(&gen->gi_exc_state, visit, arg); +} +static int __Pyx_Coroutine_clear(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + Py_CLEAR(gen->closure); + Py_CLEAR(gen->classobj); + Py_CLEAR(gen->yieldfrom); + __Pyx_Coroutine_ExceptionClear(&gen->gi_exc_state); +#ifdef 
__Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer); + } +#endif + Py_CLEAR(gen->gi_code); + Py_CLEAR(gen->gi_name); + Py_CLEAR(gen->gi_qualname); + Py_CLEAR(gen->gi_modulename); + return 0; +} +static void __Pyx_Coroutine_dealloc(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject_GC_UnTrack(gen); + if (gen->gi_weakreflist != NULL) + PyObject_ClearWeakRefs(self); + if (gen->resume_label >= 0) { + PyObject_GC_Track(self); +#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE + if (PyObject_CallFinalizerFromDealloc(self)) +#else + Py_TYPE(gen)->tp_del(self); + if (self->ob_refcnt > 0) +#endif + { + return; + } + PyObject_GC_UnTrack(self); + } +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + /* We have to handle this case for asynchronous generators + right here, because this code has to be between UNTRACK + and GC_Del. */ + Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer); + } +#endif + __Pyx_Coroutine_clear(self); + PyObject_GC_Del(gen); +} +static void __Pyx_Coroutine_del(PyObject *self) { + PyObject *error_type, *error_value, *error_traceback; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + __Pyx_PyThreadState_declare + if (gen->resume_label < 0) { + return; + } +#if !CYTHON_USE_TP_FINALIZE + assert(self->ob_refcnt == 0); + self->ob_refcnt = 1; +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&error_type, &error_value, &error_traceback); +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self; + PyObject *finalizer = agen->ag_finalizer; + if (finalizer && !agen->ag_closed) { + PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self); + if (unlikely(!res)) { + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + __Pyx_ErrRestore(error_type, error_value, error_traceback); + return; + } + } +#endif + if (unlikely(gen->resume_label == 0 && !error_value)) { +#ifdef __Pyx_Coroutine_USED +#ifdef __Pyx_Generator_USED + if (!__Pyx_Generator_CheckExact(self)) +#endif + { + PyObject_GC_UnTrack(self); +#if PY_MAJOR_VERSION >= 3 || defined(PyErr_WarnFormat) + if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0)) + PyErr_WriteUnraisable(self); +#else + {PyObject *msg; + char *cmsg; + #if CYTHON_COMPILING_IN_PYPY + msg = NULL; + cmsg = (char*) "coroutine was never awaited"; + #else + char *cname; + PyObject *qualname; + qualname = gen->gi_qualname; + cname = PyString_AS_STRING(qualname); + msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname); + if (unlikely(!msg)) { + PyErr_Clear(); + cmsg = (char*) "coroutine was never awaited"; + } else { + cmsg = PyString_AS_STRING(msg); + } + #endif + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0)) + PyErr_WriteUnraisable(self); + Py_XDECREF(msg);} +#endif + PyObject_GC_Track(self); + } +#endif + } else { + PyObject *res = __Pyx_Coroutine_Close(self); + if (unlikely(!res)) { + if (PyErr_Occurred()) + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + } + __Pyx_ErrRestore(error_type, error_value, error_traceback); +#if !CYTHON_USE_TP_FINALIZE + assert(self->ob_refcnt > 0); + if (--self->ob_refcnt == 0) { + return; + } + { + Py_ssize_t refcnt = self->ob_refcnt; + _Py_NewReference(self); + self->ob_refcnt = refcnt; + } +#if CYTHON_COMPILING_IN_CPYTHON + assert(PyType_IS_GC(self->ob_type) && + 
_Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); + _Py_DEC_REFTOTAL; +#endif +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif +#endif +} +static PyObject * +__Pyx_Coroutine_get_name(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + PyObject *name = self->gi_name; + if (unlikely(!name)) name = Py_None; + Py_INCREF(name); + return name; +} +static int +__Pyx_Coroutine_set_name(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + tmp = self->gi_name; + Py_INCREF(value); + self->gi_name = value; + Py_XDECREF(tmp); + return 0; +} +static PyObject * +__Pyx_Coroutine_get_qualname(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + PyObject *name = self->gi_qualname; + if (unlikely(!name)) name = Py_None; + Py_INCREF(name); + return name; +} +static int +__Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + tmp = self->gi_qualname; + Py_INCREF(value); + self->gi_qualname = value; + Py_XDECREF(tmp); + return 0; +} +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + __pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type); + if (unlikely(!gen)) + return NULL; + return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name); +} +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + gen->body = body; + gen->closure = closure; + Py_XINCREF(closure); + gen->is_running = 0; + gen->resume_label = 0; + gen->classobj = NULL; + gen->yieldfrom = NULL; + gen->gi_exc_state.exc_type = NULL; + gen->gi_exc_state.exc_value = NULL; + gen->gi_exc_state.exc_traceback = NULL; +#if CYTHON_USE_EXC_INFO_STACK + gen->gi_exc_state.previous_item = NULL; +#endif + gen->gi_weakreflist = NULL; + Py_XINCREF(qualname); + gen->gi_qualname = qualname; + Py_XINCREF(name); + gen->gi_name = name; + Py_XINCREF(module_name); + gen->gi_modulename = module_name; + Py_XINCREF(code); + gen->gi_code = code; + PyObject_GC_Track(gen); + return gen; +} + +/* PatchModuleWithCoroutine */ +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + int result; + PyObject *globals, *result_obj; + globals = PyDict_New(); if (unlikely(!globals)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_coroutine_type", + #ifdef __Pyx_Coroutine_USED + (PyObject*)__pyx_CoroutineType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_generator_type", + #ifdef __Pyx_Generator_USED + (PyObject*)__pyx_GeneratorType); + #else + Py_None); + #endif + 
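+/* __Pyx_Coroutine_patch_module seeds a temporary globals dict with _cython_coroutine_type/_cython_generator_type (or Py_None), plus _module and __builtins__, then execs the py_code snippet; __Pyx_patch_abc_module below uses this to register the generated generator/coroutine types with collections.abc. */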
if (unlikely(result < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "__builtins__", __pyx_b) < 0)) goto ignore; + result_obj = PyRun_String(py_code, Py_file_input, globals, globals); + if (unlikely(!result_obj)) goto ignore; + Py_DECREF(result_obj); + Py_DECREF(globals); + return module; +ignore: + Py_XDECREF(globals); + PyErr_WriteUnraisable(module); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 0)) { + Py_DECREF(module); + module = NULL; + } +#else + py_code++; +#endif + return module; +} + +/* PatchGeneratorABC */ +#ifndef CYTHON_REGISTER_ABCS +#define CYTHON_REGISTER_ABCS 1 +#endif +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) +static PyObject* __Pyx_patch_abc_module(PyObject *module); +static PyObject* __Pyx_patch_abc_module(PyObject *module) { + module = __Pyx_Coroutine_patch_module( + module, "" +"if _cython_generator_type is not None:\n" +" try: Generator = _module.Generator\n" +" except AttributeError: pass\n" +" else: Generator.register(_cython_generator_type)\n" +"if _cython_coroutine_type is not None:\n" +" try: Coroutine = _module.Coroutine\n" +" except AttributeError: pass\n" +" else: Coroutine.register(_cython_coroutine_type)\n" + ); + return module; +} +#endif +static int __Pyx_patch_abc(void) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + static int abc_patched = 0; + if (CYTHON_REGISTER_ABCS && !abc_patched) { + PyObject *module; + module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections"); + if (!module) { + PyErr_WriteUnraisable(NULL); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, + ((PY_MAJOR_VERSION >= 3) ? 
+ "Cython module failed to register with collections.abc module" : + "Cython module failed to register with collections module"), 1) < 0)) { + return -1; + } + } else { + module = __Pyx_patch_abc_module(module); + abc_patched = 1; + if (unlikely(!module)) + return -1; + Py_DECREF(module); + } + module = PyImport_ImportModule("backports_abc"); + if (module) { + module = __Pyx_patch_abc_module(module); + Py_XDECREF(module); + } + if (!module) { + PyErr_Clear(); + } + } +#else + if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL); +#endif + return 0; +} + +/* Generator */ +static PyMethodDef __pyx_Generator_methods[] = { + {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, + (char*) PyDoc_STR("send(arg) -> send 'arg' into generator,\nreturn next yielded value or raise StopIteration.")}, + {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, + (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in generator,\nreturn next yielded value or raise StopIteration.")}, + {"close", (PyCFunction) __Pyx_Coroutine_Close_Method, METH_NOARGS, + (char*) PyDoc_STR("close() -> raise GeneratorExit inside generator.")}, + {0, 0, 0, 0} +}; +static PyMemberDef __pyx_Generator_memberlist[] = { + {(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, + {(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, + (char*) PyDoc_STR("object being iterated by 'yield from', or None")}, + {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL}, + {0, 0, 0, 0, 0} +}; +static PyGetSetDef __pyx_Generator_getsets[] = { + {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, + (char*) PyDoc_STR("name of the generator"), 0}, + {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, + (char*) PyDoc_STR("qualified name of the generator"), 0}, + {0, 0, 0, 0, 0} +}; +static PyTypeObject __pyx_GeneratorType_type = { + PyVarObject_HEAD_INIT(0, 0) + "generator", + sizeof(__pyx_CoroutineObject), + 0, + (destructor) __Pyx_Coroutine_dealloc, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, + 0, + (traverseproc) __Pyx_Coroutine_traverse, + 0, + 0, + offsetof(__pyx_CoroutineObject, gi_weakreflist), + 0, + (iternextfunc) __Pyx_Generator_Next, + __pyx_Generator_methods, + __pyx_Generator_memberlist, + __pyx_Generator_getsets, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if CYTHON_USE_TP_FINALIZE + 0, +#else + __Pyx_Coroutine_del, +#endif + 0, +#if CYTHON_USE_TP_FINALIZE + __Pyx_Coroutine_del, +#elif PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +}; +static int __pyx_Generator_init(void) { + __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; + __pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type); + if (unlikely(!__pyx_GeneratorType)) { + return -1; + } + return 0; +} + +/* CheckBinaryVersion */ +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, 
__Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, 
const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_parser.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..2fa2fd8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.cpython-37m-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_parser.pyx b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.pyx new file mode 100644 index 0000000..817b482 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_http_parser.pyx @@ -0,0 +1,845 @@ +#cython: language_level=3 +# +# Based on https://github.com/MagicStack/httptools +# +from __future__ import absolute_import, print_function +from cpython.mem cimport PyMem_Malloc, PyMem_Free +from libc.string cimport memcpy +from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE, + Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize) + +from multidict import (CIMultiDict as _CIMultiDict, + CIMultiDictProxy as _CIMultiDictProxy) +from yarl import URL as _URL + +from aiohttp import hdrs +from .http_exceptions import ( + BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, + PayloadEncodingError, ContentLengthError, TransferEncodingError) +from .http_writer import (HttpVersion as _HttpVersion, + HttpVersion10 as _HttpVersion10, + HttpVersion11 as _HttpVersion11) +from .http_parser import DeflateBuffer as _DeflateBuffer +from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD, + StreamReader as _StreamReader) + +cimport cython +from aiohttp cimport _cparser as cparser + +include "_headers.pxi" + +from aiohttp cimport _find_header + +DEF DEFAULT_FREELIST_SIZE = 250 + +cdef extern from "Python.h": + int PyByteArray_Resize(object, Py_ssize_t) except -1 + 
Py_ssize_t PyByteArray_Size(object) except -1 + char* PyByteArray_AsString(object) + +__all__ = ('HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +cdef object URL = _URL +cdef object URL_build = URL.build +cdef object CIMultiDict = _CIMultiDict +cdef object CIMultiDictProxy = _CIMultiDictProxy +cdef object HttpVersion = _HttpVersion +cdef object HttpVersion10 = _HttpVersion10 +cdef object HttpVersion11 = _HttpVersion11 +cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 +cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING +cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD +cdef object StreamReader = _StreamReader +cdef object DeflateBuffer = _DeflateBuffer + + +cdef inline object extend(object buf, const char* at, size_t length): + cdef Py_ssize_t s + cdef char* ptr + s = PyByteArray_Size(buf) + PyByteArray_Resize(buf, s + length) + ptr = PyByteArray_AsString(buf) + memcpy(ptr + s, at, length) + + +DEF METHODS_COUNT = 34; + +cdef list _http_method = [] + +for i in range(METHODS_COUNT): + _http_method.append( + cparser.http_method_str(<cparser.http_method> i).decode('ascii')) + + +cdef inline str http_method_str(int i): + if i < METHODS_COUNT: + return _http_method[i] + else: + return "<unknown>" + +cdef inline object find_header(bytes raw_header): + cdef Py_ssize_t size + cdef char *buf + cdef int idx + PyBytes_AsStringAndSize(raw_header, &buf, &size) + idx = _find_header.find_header(buf, size) + if idx == -1: + return raw_header.decode('utf-8', 'surrogateescape') + return headers[idx] + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawRequestMessage: + cdef readonly str method + cdef readonly str path + cdef readonly object version # HttpVersion + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + cdef readonly object url # yarl.URL + + def __init__(self, method, path, version, headers, raw_headers, + should_close, compression, upgrade, chunked, url): + self.method = method + self.path = path + self.version = version + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + self.url = url + + def __repr__(self): + info = [] + info.append(("method", self.method)) + info.append(("path", self.path)) + info.append(("version", self.version)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + info.append(("url", self.url)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '<RawRequestMessage(' + sinfo + ')>' + + def _replace(self, **dct): + cdef RawRequestMessage ret + ret = _new_request_message(self.method, + self.path, + self.version, + self.headers, + self.raw_headers, + self.should_close, + self.compression, + self.upgrade, + self.chunked, + self.url) + if "method" in dct: + ret.method = dct["method"] + if "path" in dct: + ret.path = dct["path"] + if "version" in dct: + ret.version = dct["version"] + if "headers" in dct: + ret.headers = dct["headers"] + if "raw_headers" in dct: + ret.raw_headers = dct["raw_headers"] + if "should_close" in dct: + ret.should_close = dct["should_close"] + if "compression" in dct: + ret.compression = dct["compression"] + if "upgrade" in dct: + 
ret.upgrade = dct["upgrade"] + if "chunked" in dct: + ret.chunked = dct["chunked"] + if "url" in dct: + ret.url = dct["url"] + return ret + +cdef _new_request_message(str method, + str path, + object version, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked, + object url): + cdef RawRequestMessage ret + ret = RawRequestMessage.__new__(RawRequestMessage) + ret.method = method + ret.path = path + ret.version = version + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + ret.url = url + return ret + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawResponseMessage: + cdef readonly object version # HttpVersion + cdef readonly int code + cdef readonly str reason + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + + def __init__(self, version, code, reason, headers, raw_headers, + should_close, compression, upgrade, chunked): + self.version = version + self.code = code + self.reason = reason + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + + def __repr__(self): + info = [] + info.append(("version", self.version)) + info.append(("code", self.code)) + info.append(("reason", self.reason)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + +cdef _new_response_message(object version, + int code, + str reason, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked): + cdef RawResponseMessage ret + ret = RawResponseMessage.__new__(RawResponseMessage) + ret.version = version + ret.code = code + ret.reason = reason + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + return ret + + +@cython.internal +cdef class HttpParser: + + cdef: + cparser.http_parser* _cparser + cparser.http_parser_settings* _csettings + + bytearray _raw_name + bytearray _raw_value + bint _has_value + + object _protocol + object _loop + object _timer + + size_t _max_line_size + size_t _max_field_size + size_t _max_headers + bint _response_with_body + + bint _started + object _url + bytearray _buf + str _path + str _reason + object _headers + list _raw_headers + bint _upgraded + list _messages + object _payload + bint _payload_error + object _payload_exception + object _last_error + bint _auto_decompress + + str _content_encoding + + Py_buffer py_buf + + def __cinit__(self): + self._cparser = \ + PyMem_Malloc(sizeof(cparser.http_parser)) + if self._cparser is NULL: + raise MemoryError() + + self._csettings = \ + PyMem_Malloc(sizeof(cparser.http_parser_settings)) + if self._csettings is NULL: + raise MemoryError() + + def __dealloc__(self): + PyMem_Free(self._cparser) + PyMem_Free(self._csettings) + + cdef _init(self, cparser.http_parser_type mode, + object protocol, object loop, 
object timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint auto_decompress=True): + cparser.http_parser_init(self._cparser, mode) + self._cparser.data = self + self._cparser.content_length = 0 + + cparser.http_parser_settings_init(self._csettings) + + self._protocol = protocol + self._loop = loop + self._timer = timer + + self._buf = bytearray() + self._payload = None + self._payload_error = 0 + self._payload_exception = payload_exception + self._messages = [] + + self._raw_name = bytearray() + self._raw_value = bytearray() + self._has_value = False + + self._max_line_size = max_line_size + self._max_headers = max_headers + self._max_field_size = max_field_size + self._response_with_body = response_with_body + self._upgraded = False + self._auto_decompress = auto_decompress + self._content_encoding = None + + self._csettings.on_url = cb_on_url + self._csettings.on_status = cb_on_status + self._csettings.on_header_field = cb_on_header_field + self._csettings.on_header_value = cb_on_header_value + self._csettings.on_headers_complete = cb_on_headers_complete + self._csettings.on_body = cb_on_body + self._csettings.on_message_begin = cb_on_message_begin + self._csettings.on_message_complete = cb_on_message_complete + self._csettings.on_chunk_header = cb_on_chunk_header + self._csettings.on_chunk_complete = cb_on_chunk_complete + + self._last_error = None + + cdef _process_header(self): + if self._raw_name: + raw_name = bytes(self._raw_name) + raw_value = bytes(self._raw_value) + + name = find_header(raw_name) + value = raw_value.decode('utf-8', 'surrogateescape') + + self._headers.add(name, value) + + if name is CONTENT_ENCODING: + self._content_encoding = value + + PyByteArray_Resize(self._raw_name, 0) + PyByteArray_Resize(self._raw_value, 0) + self._has_value = False + self._raw_headers.append((raw_name, raw_value)) + + cdef _on_header_field(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + if self._has_value: + self._process_header() + + size = PyByteArray_Size(self._raw_name) + PyByteArray_Resize(self._raw_name, size + length) + buf = PyByteArray_AsString(self._raw_name) + memcpy(buf + size, at, length) + + cdef _on_header_value(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + + size = PyByteArray_Size(self._raw_value) + PyByteArray_Resize(self._raw_value, size + length) + buf = PyByteArray_AsString(self._raw_value) + memcpy(buf + size, at, length) + self._has_value = True + + cdef _on_headers_complete(self): + self._process_header() + + method = http_method_str(self._cparser.method) + should_close = not cparser.http_should_keep_alive(self._cparser) + upgrade = self._cparser.upgrade + chunked = self._cparser.flags & cparser.F_CHUNKED + + raw_headers = tuple(self._raw_headers) + headers = CIMultiDictProxy(self._headers) + + if upgrade or self._cparser.method == 5: # cparser.CONNECT: + self._upgraded = True + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + encoding = None + enc = self._content_encoding + if enc is not None: + self._content_encoding = None + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: + msg = _new_request_message( + method, self._path, + self.http_version(), headers, raw_headers, + should_close, encoding, upgrade, chunked, self._url) + else: + msg = _new_response_message( + 
self.http_version(), self._cparser.status_code, self._reason, + headers, raw_headers, should_close, encoding, + upgrade, chunked) + + if (self._cparser.content_length > 0 or chunked or + self._cparser.method == 5): # CONNECT: 5 + payload = StreamReader( + self._protocol, timer=self._timer, loop=self._loop) + else: + payload = EMPTY_PAYLOAD + + self._payload = payload + if encoding is not None and self._auto_decompress: + self._payload = DeflateBuffer(payload, encoding) + + if not self._response_with_body: + payload = EMPTY_PAYLOAD + + self._messages.append((msg, payload)) + + cdef _on_message_complete(self): + self._payload.feed_eof() + self._payload = None + + cdef _on_chunk_header(self): + self._payload.begin_http_chunk_receiving() + + cdef _on_chunk_complete(self): + self._payload.end_http_chunk_receiving() + + cdef object _on_status_complete(self): + pass + + cdef inline http_version(self): + cdef cparser.http_parser* parser = self._cparser + + if parser.http_major == 1: + if parser.http_minor == 0: + return HttpVersion10 + elif parser.http_minor == 1: + return HttpVersion11 + + return HttpVersion(parser.http_major, parser.http_minor) + + ### Public API ### + + def feed_eof(self): + cdef bytes desc + + if self._payload is not None: + if self._cparser.flags & cparser.F_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + elif self._cparser.flags & cparser.F_CONTENTLENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif self._cparser.http_errno != cparser.HPE_OK: + desc = cparser.http_errno_description( + self._cparser.http_errno) + raise PayloadEncodingError(desc.decode('latin-1')) + else: + self._payload.feed_eof() + elif self._started: + self._on_headers_complete() + if self._messages: + return self._messages[-1][0] + + def feed_data(self, data): + cdef: + size_t data_len + size_t nb + + PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + data_len = self.py_buf.len + + nb = cparser.http_parser_execute( + self._cparser, + self._csettings, + self.py_buf.buf, + data_len) + + PyBuffer_Release(&self.py_buf) + + # i am not sure about cparser.HPE_INVALID_METHOD, + # seems get err for valid request + # test_client_functional.py::test_post_data_with_bytesio_file + if (self._cparser.http_errno != cparser.HPE_OK and + (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or + self._cparser.method == 0)): + if self._payload_error == 0: + if self._last_error is not None: + ex = self._last_error + self._last_error = None + else: + ex = parser_error_from_errno( + self._cparser.http_errno) + self._payload = None + raise ex + + if self._messages: + messages = self._messages + self._messages = [] + else: + messages = () + + if self._upgraded: + return messages, True, data[nb:] + else: + return messages, False, b'' + + +cdef class HttpRequestParser(HttpParser): + + def __init__(self, protocol, loop, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False): + self._init(cparser.HTTP_REQUEST, protocol, loop, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body) + + cdef object _on_status_complete(self): + cdef Py_buffer py_buf + if not self._buf: + return + self._path = self._buf.decode('utf-8', 'surrogateescape') + if self._cparser.method == 5: # CONNECT + self._url = URL(self._path) + else: + PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) + try: 
+ self._url = _parse_url(py_buf.buf, + py_buf.len) + finally: + PyBuffer_Release(&py_buf) + PyByteArray_Resize(self._buf, 0) + + +cdef class HttpResponseParser(HttpParser): + + def __init__(self, protocol, loop, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True): + self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, auto_decompress) + + cdef object _on_status_complete(self): + if self._buf: + self._reason = self._buf.decode('utf-8', 'surrogateescape') + PyByteArray_Resize(self._buf, 0) + + +cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: + cdef HttpParser pyparser = parser.data + + pyparser._started = True + pyparser._headers = CIMultiDict() + pyparser._raw_headers = [] + PyByteArray_Resize(pyparser._buf, 0) + pyparser._path = None + pyparser._reason = None + return 0 + + +cdef int cb_on_url(cparser.http_parser* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_status(cparser.http_parser* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef str reason + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_field(cparser.http_parser* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + pyparser._on_status_complete() + size = len(pyparser._raw_name) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header name is too long', pyparser._max_field_size, size) + pyparser._on_header_field(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_value(cparser.http_parser* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + size = len(pyparser._raw_value) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header value is too long', pyparser._max_field_size, size) + pyparser._on_header_value(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_status_complete() + pyparser._on_headers_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT + return 2 + else: + return 0 + + +cdef int cb_on_body(cparser.http_parser* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef bytes body = at[:length] + try: + pyparser._payload.feed_data(body, length) + except BaseException as exc: + if pyparser._payload_exception is not None: + 
pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + else: + pyparser._payload.set_exception(exc) + pyparser._payload_error = 1 + return -1 + else: + return 0 + + +cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._started = False + pyparser._on_message_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_header() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef parser_error_from_errno(cparser.http_errno errno): + cdef bytes desc = cparser.http_errno_description(errno) + + if errno in (cparser.HPE_CB_message_begin, + cparser.HPE_CB_url, + cparser.HPE_CB_header_field, + cparser.HPE_CB_header_value, + cparser.HPE_CB_headers_complete, + cparser.HPE_CB_body, + cparser.HPE_CB_message_complete, + cparser.HPE_CB_status, + cparser.HPE_CB_chunk_header, + cparser.HPE_CB_chunk_complete): + cls = BadHttpMessage + + elif errno == cparser.HPE_INVALID_STATUS: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_METHOD: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_URL: + cls = InvalidURLError + + else: + cls = BadHttpMessage + + return cls(desc.decode('latin-1')) + + +def parse_url(url): + cdef: + Py_buffer py_buf + char* buf_data + + PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + try: + buf_data = py_buf.buf + return _parse_url(buf_data, py_buf.len) + finally: + PyBuffer_Release(&py_buf) + + +cdef _parse_url(char* buf_data, size_t length): + cdef: + cparser.http_parser_url* parsed + int res + str schema = None + str host = None + object port = None + str path = None + str query = None + str fragment = None + str user = None + str password = None + str userinfo = None + object result = None + int off + int ln + + parsed = \ + PyMem_Malloc(sizeof(cparser.http_parser_url)) + if parsed is NULL: + raise MemoryError() + cparser.http_parser_url_init(parsed) + try: + res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) + + if res == 0: + if parsed.field_set & (1 << cparser.UF_SCHEMA): + off = parsed.field_data[cparser.UF_SCHEMA].off + ln = parsed.field_data[cparser.UF_SCHEMA].len + schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + else: + schema = '' + + if parsed.field_set & (1 << cparser.UF_HOST): + off = parsed.field_data[cparser.UF_HOST].off + ln = parsed.field_data[cparser.UF_HOST].len + host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + else: + host = '' + + if parsed.field_set & (1 << cparser.UF_PORT): + port = parsed.port + + if parsed.field_set & (1 << cparser.UF_PATH): + off = parsed.field_data[cparser.UF_PATH].off + ln = parsed.field_data[cparser.UF_PATH].len + path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + else: + path = '' + + if parsed.field_set & (1 << cparser.UF_QUERY): + off = parsed.field_data[cparser.UF_QUERY].off + ln = parsed.field_data[cparser.UF_QUERY].len + query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + else: + query = '' + + if parsed.field_set & (1 << cparser.UF_FRAGMENT): + off = 
parsed.field_data[cparser.UF_FRAGMENT].off + ln = parsed.field_data[cparser.UF_FRAGMENT].len + fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + else: + fragment = '' + + if parsed.field_set & (1 << cparser.UF_USERINFO): + off = parsed.field_data[cparser.UF_USERINFO].off + ln = parsed.field_data[cparser.UF_USERINFO].len + userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') + + user, sep, password = userinfo.partition(':') + + return URL_build(scheme=schema, + user=user, password=password, host=host, port=port, + path=path, query=query, fragment=fragment) + else: + raise InvalidURLError("invalid url {!r}".format(buf_data)) + finally: + PyMem_Free(parsed) diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_writer.c b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.c new file mode 100644 index 0000000..f407c35 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.c @@ -0,0 +1,5616 @@ +/* Generated by Cython 0.29.2 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "name": "aiohttp._http_writer", + "sources": [ + "aiohttp/_http_writer.pyx" + ] + }, + "module_name": "aiohttp._http_writer" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. +#else +#define CYTHON_ABI "0_29_2" +#define CYTHON_HEX_VERSION 0x001D02F0 +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define 
CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) 
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define 
__Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_USE_DICT_VERSIONS +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ + } +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static 
CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + 
#ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__aiohttp___http_writer +#define __PYX_HAVE_API__aiohttp___http_writer +/* Early includes */ +#include +#include +#include +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) 
((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? 
__Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) 
&& (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "aiohttp/_http_writer.pyx", + "type.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_t_7aiohttp_12_http_writer_Writer; + +/* "aiohttp/_http_writer.pyx":19 + * # ----------------- writer --------------------------- + * + * cdef struct Writer: # <<<<<<<<<<<<<< + * char *buf + * Py_ssize_t size + */ +struct __pyx_t_7aiohttp_12_http_writer_Writer { + char *buf; + Py_ssize_t size; + Py_ssize_t pos; +}; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) 
+#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* WriteUnraisableException.proto */ +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define 
__Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 
1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* ReRaiseException.proto */ +static CYTHON_INLINE void __Pyx_ReraiseException(void); + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* UnicodeAsUCS4.proto */ +static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject*); + +/* ObjectAsUCS4.proto */ +#define __Pyx_PyObject_AsPy_UCS4(x)\ + (likely(PyUnicode_Check(x)) ? 
__Pyx_PyUnicode_AsPy_UCS4(x) : __Pyx__PyObject_AsPy_UCS4(x)) +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject*); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'aiohttp._http_writer' */ +static char __pyx_v_7aiohttp_12_http_writer_BUFFER[0x4000]; +static PyObject *__pyx_v_7aiohttp_12_http_writer__istr = 0; +static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__init_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *); /*proto*/ +static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__release_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *); /*proto*/ +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_byte(struct __pyx_t_7aiohttp_12_http_writer_Writer *, uint8_t); /*proto*/ +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_utf8(struct __pyx_t_7aiohttp_12_http_writer_Writer *, Py_UCS4); /*proto*/ +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_str(struct __pyx_t_7aiohttp_12_http_writer_Writer *, PyObject *); /*proto*/ +static PyObject *__pyx_f_7aiohttp_12_http_writer_to_str(PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "aiohttp._http_writer" +extern int __pyx_module_is_main_aiohttp___http_writer; +int __pyx_module_is_main_aiohttp___http_writer = 0; + +/* Implementation of 'aiohttp._http_writer' */ +static PyObject *__pyx_builtin_TypeError; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_ret[] = "ret"; +static const char __pyx_k_val[] = "val"; +static const char __pyx_k_istr[] = "istr"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_items[] = "items"; +static const char __pyx_k_format[] = "format"; +static const char 
__pyx_k_import[] = "__import__"; +static const char __pyx_k_writer[] = "writer"; +static const char __pyx_k_headers[] = "headers"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_multidict[] = "multidict"; +static const char __pyx_k_status_line[] = "status_line"; +static const char __pyx_k_serialize_headers[] = "_serialize_headers"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_aiohttp__http_writer[] = "aiohttp._http_writer"; +static const char __pyx_k_aiohttp__http_writer_pyx[] = "aiohttp/_http_writer.pyx"; +static const char __pyx_k_Cannot_serialize_non_str_key_r[] = "Cannot serialize non-str key {!r}"; +static PyObject *__pyx_kp_s_Cannot_serialize_non_str_key_r; +static PyObject *__pyx_n_s_TypeError; +static PyObject *__pyx_n_s_aiohttp__http_writer; +static PyObject *__pyx_kp_s_aiohttp__http_writer_pyx; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_format; +static PyObject *__pyx_n_s_headers; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_istr; +static PyObject *__pyx_n_s_items; +static PyObject *__pyx_n_s_key; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_multidict; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_ret; +static PyObject *__pyx_n_s_serialize_headers; +static PyObject *__pyx_n_s_status_line; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_val; +static PyObject *__pyx_n_s_writer; +static PyObject *__pyx_pf_7aiohttp_12_http_writer__serialize_headers(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_status_line, PyObject *__pyx_v_headers); /* proto */ +static PyObject *__pyx_tuple_; +static PyObject *__pyx_codeobj__2; +/* Late includes */ + +/* "aiohttp/_http_writer.pyx":25 + * + * + * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< + * writer.buf = &BUFFER[0] + * writer.size = BUF_SIZE + */ + +static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__init_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_init_writer", 0); + + /* "aiohttp/_http_writer.pyx":26 + * + * cdef inline void _init_writer(Writer* writer): + * writer.buf = &BUFFER[0] # <<<<<<<<<<<<<< + * writer.size = BUF_SIZE + * writer.pos = 0 + */ + __pyx_v_writer->buf = (&(__pyx_v_7aiohttp_12_http_writer_BUFFER[0])); + + /* "aiohttp/_http_writer.pyx":27 + * cdef inline void _init_writer(Writer* writer): + * writer.buf = &BUFFER[0] + * writer.size = BUF_SIZE # <<<<<<<<<<<<<< + * writer.pos = 0 + * + */ + __pyx_v_writer->size = 0x4000; + + /* "aiohttp/_http_writer.pyx":28 + * writer.buf = &BUFFER[0] + * writer.size = BUF_SIZE + * writer.pos = 0 # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_writer->pos = 0; + + /* "aiohttp/_http_writer.pyx":25 + * + * + * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< + * writer.buf = &BUFFER[0] + * writer.size = BUF_SIZE + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "aiohttp/_http_writer.pyx":31 + * + * + * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< + * if writer.buf != BUFFER: + * PyMem_Free(writer.buf) + */ + +static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__release_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("_release_writer", 0); + + /* "aiohttp/_http_writer.pyx":32 + * + * cdef inline void _release_writer(Writer* writer): + * if 
writer.buf != BUFFER: # <<<<<<<<<<<<<< + * PyMem_Free(writer.buf) + * + */ + __pyx_t_1 = ((__pyx_v_writer->buf != __pyx_v_7aiohttp_12_http_writer_BUFFER) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":33 + * cdef inline void _release_writer(Writer* writer): + * if writer.buf != BUFFER: + * PyMem_Free(writer.buf) # <<<<<<<<<<<<<< + * + * + */ + PyMem_Free(__pyx_v_writer->buf); + + /* "aiohttp/_http_writer.pyx":32 + * + * cdef inline void _release_writer(Writer* writer): + * if writer.buf != BUFFER: # <<<<<<<<<<<<<< + * PyMem_Free(writer.buf) + * + */ + } + + /* "aiohttp/_http_writer.pyx":31 + * + * + * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< + * if writer.buf != BUFFER: + * PyMem_Free(writer.buf) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "aiohttp/_http_writer.pyx":36 + * + * + * cdef inline int _write_byte(Writer* writer, uint8_t ch): # <<<<<<<<<<<<<< + * cdef char * buf + * cdef Py_ssize_t size + */ + +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_byte(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, uint8_t __pyx_v_ch) { + char *__pyx_v_buf; + Py_ssize_t __pyx_v_size; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2; + __Pyx_RefNannySetupContext("_write_byte", 0); + + /* "aiohttp/_http_writer.pyx":40 + * cdef Py_ssize_t size + * + * if writer.pos == writer.size: # <<<<<<<<<<<<<< + * # reallocate + * size = writer.size + BUF_SIZE + */ + __pyx_t_1 = ((__pyx_v_writer->pos == __pyx_v_writer->size) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":42 + * if writer.pos == writer.size: + * # reallocate + * size = writer.size + BUF_SIZE # <<<<<<<<<<<<<< + * if writer.buf == BUFFER: + * buf = PyMem_Malloc(size) + */ + __pyx_v_size = (__pyx_v_writer->size + 0x4000); + + /* "aiohttp/_http_writer.pyx":43 + * # reallocate + * size = writer.size + BUF_SIZE + * if writer.buf == BUFFER: # <<<<<<<<<<<<<< + * buf = PyMem_Malloc(size) + * if buf == NULL: + */ + __pyx_t_1 = ((__pyx_v_writer->buf == __pyx_v_7aiohttp_12_http_writer_BUFFER) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":44 + * size = writer.size + BUF_SIZE + * if writer.buf == BUFFER: + * buf = PyMem_Malloc(size) # <<<<<<<<<<<<<< + * if buf == NULL: + * PyErr_NoMemory() + */ + __pyx_v_buf = ((char *)PyMem_Malloc(__pyx_v_size)); + + /* "aiohttp/_http_writer.pyx":45 + * if writer.buf == BUFFER: + * buf = PyMem_Malloc(size) + * if buf == NULL: # <<<<<<<<<<<<<< + * PyErr_NoMemory() + * return -1 + */ + __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":46 + * buf = PyMem_Malloc(size) + * if buf == NULL: + * PyErr_NoMemory() # <<<<<<<<<<<<<< + * return -1 + * memcpy(buf, writer.buf, writer.size) + */ + __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 46, __pyx_L1_error) + + /* "aiohttp/_http_writer.pyx":47 + * if buf == NULL: + * PyErr_NoMemory() + * return -1 # <<<<<<<<<<<<<< + * memcpy(buf, writer.buf, writer.size) + * else: + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":45 + * if writer.buf == BUFFER: + * buf = PyMem_Malloc(size) + * if buf == NULL: # <<<<<<<<<<<<<< + * PyErr_NoMemory() + * return -1 + */ + } + + /* "aiohttp/_http_writer.pyx":48 + * PyErr_NoMemory() + * return -1 + * memcpy(buf, writer.buf, writer.size) # <<<<<<<<<<<<<< + * else: + * buf = PyMem_Realloc(writer.buf, size) + */ + (void)(memcpy(__pyx_v_buf, __pyx_v_writer->buf, __pyx_v_writer->size)); + + /* 
"aiohttp/_http_writer.pyx":43 + * # reallocate + * size = writer.size + BUF_SIZE + * if writer.buf == BUFFER: # <<<<<<<<<<<<<< + * buf = PyMem_Malloc(size) + * if buf == NULL: + */ + goto __pyx_L4; + } + + /* "aiohttp/_http_writer.pyx":50 + * memcpy(buf, writer.buf, writer.size) + * else: + * buf = PyMem_Realloc(writer.buf, size) # <<<<<<<<<<<<<< + * if buf == NULL: + * PyErr_NoMemory() + */ + /*else*/ { + __pyx_v_buf = ((char *)PyMem_Realloc(__pyx_v_writer->buf, __pyx_v_size)); + + /* "aiohttp/_http_writer.pyx":51 + * else: + * buf = PyMem_Realloc(writer.buf, size) + * if buf == NULL: # <<<<<<<<<<<<<< + * PyErr_NoMemory() + * return -1 + */ + __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":52 + * buf = PyMem_Realloc(writer.buf, size) + * if buf == NULL: + * PyErr_NoMemory() # <<<<<<<<<<<<<< + * return -1 + * writer.buf = buf + */ + __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 52, __pyx_L1_error) + + /* "aiohttp/_http_writer.pyx":53 + * if buf == NULL: + * PyErr_NoMemory() + * return -1 # <<<<<<<<<<<<<< + * writer.buf = buf + * writer.size = size + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":51 + * else: + * buf = PyMem_Realloc(writer.buf, size) + * if buf == NULL: # <<<<<<<<<<<<<< + * PyErr_NoMemory() + * return -1 + */ + } + } + __pyx_L4:; + + /* "aiohttp/_http_writer.pyx":54 + * PyErr_NoMemory() + * return -1 + * writer.buf = buf # <<<<<<<<<<<<<< + * writer.size = size + * writer.buf[writer.pos] = ch + */ + __pyx_v_writer->buf = __pyx_v_buf; + + /* "aiohttp/_http_writer.pyx":55 + * return -1 + * writer.buf = buf + * writer.size = size # <<<<<<<<<<<<<< + * writer.buf[writer.pos] = ch + * writer.pos += 1 + */ + __pyx_v_writer->size = __pyx_v_size; + + /* "aiohttp/_http_writer.pyx":40 + * cdef Py_ssize_t size + * + * if writer.pos == writer.size: # <<<<<<<<<<<<<< + * # reallocate + * size = writer.size + BUF_SIZE + */ + } + + /* "aiohttp/_http_writer.pyx":56 + * writer.buf = buf + * writer.size = size + * writer.buf[writer.pos] = ch # <<<<<<<<<<<<<< + * writer.pos += 1 + * return 0 + */ + (__pyx_v_writer->buf[__pyx_v_writer->pos]) = ((char)__pyx_v_ch); + + /* "aiohttp/_http_writer.pyx":57 + * writer.size = size + * writer.buf[writer.pos] = ch + * writer.pos += 1 # <<<<<<<<<<<<<< + * return 0 + * + */ + __pyx_v_writer->pos = (__pyx_v_writer->pos + 1); + + /* "aiohttp/_http_writer.pyx":58 + * writer.buf[writer.pos] = ch + * writer.pos += 1 + * return 0 # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":36 + * + * + * cdef inline int _write_byte(Writer* writer, uint8_t ch): # <<<<<<<<<<<<<< + * cdef char * buf + * cdef Py_ssize_t size + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_WriteUnraisable("aiohttp._http_writer._write_byte", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); + __pyx_r = 0; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_writer.pyx":61 + * + * + * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< + * cdef uint64_t utf = symbol + * + */ + +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_utf8(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_symbol) { + uint64_t __pyx_v_utf; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + __Pyx_RefNannySetupContext("_write_utf8", 0); + + /* "aiohttp/_http_writer.pyx":62 + * + * cdef inline int _write_utf8(Writer* writer, 
Py_UCS4 symbol): + * cdef uint64_t utf = symbol # <<<<<<<<<<<<<< + * + * if utf < 0x80: + */ + __pyx_v_utf = ((uint64_t)__pyx_v_symbol); + + /* "aiohttp/_http_writer.pyx":64 + * cdef uint64_t utf = symbol + * + * if utf < 0x80: # <<<<<<<<<<<<<< + * return _write_byte(writer, utf) + * elif utf < 0x800: + */ + __pyx_t_1 = ((__pyx_v_utf < 0x80) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":65 + * + * if utf < 0x80: + * return _write_byte(writer, utf) # <<<<<<<<<<<<<< + * elif utf < 0x800: + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + */ + __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)__pyx_v_utf)); + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":64 + * cdef uint64_t utf = symbol + * + * if utf < 0x80: # <<<<<<<<<<<<<< + * return _write_byte(writer, utf) + * elif utf < 0x800: + */ + } + + /* "aiohttp/_http_writer.pyx":66 + * if utf < 0x80: + * return _write_byte(writer, utf) + * elif utf < 0x800: # <<<<<<<<<<<<<< + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + * return -1 + */ + __pyx_t_1 = ((__pyx_v_utf < 0x800) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":67 + * return _write_byte(writer, utf) + * elif utf < 0x800: + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: # <<<<<<<<<<<<<< + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xc0 | (__pyx_v_utf >> 6)))) < 0) != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":68 + * elif utf < 0x800: + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + * return -1 # <<<<<<<<<<<<<< + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif 0xD800 <= utf <= 0xDFFF: + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":67 + * return _write_byte(writer, utf) + * elif utf < 0x800: + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: # <<<<<<<<<<<<<< + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + */ + } + + /* "aiohttp/_http_writer.pyx":69 + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< + * elif 0xD800 <= utf <= 0xDFFF: + * # surogate pair, ignored + */ + __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":66 + * if utf < 0x80: + * return _write_byte(writer, utf) + * elif utf < 0x800: # <<<<<<<<<<<<<< + * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + * return -1 + */ + } + + /* "aiohttp/_http_writer.pyx":70 + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< + * # surogate pair, ignored + * return 0 + */ + __pyx_t_1 = (0xD800 <= __pyx_v_utf); + if (__pyx_t_1) { + __pyx_t_1 = (__pyx_v_utf <= 0xDFFF); + } + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":72 + * elif 0xD800 <= utf <= 0xDFFF: + * # surogate pair, ignored + * return 0 # <<<<<<<<<<<<<< + * elif utf < 0x10000: + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":70 + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< + * # surogate pair, ignored + * return 0 + */ + } + + /* "aiohttp/_http_writer.pyx":73 + * # surogate pair, ignored + * return 0 + * elif utf < 0x10000: # <<<<<<<<<<<<<< + * if _write_byte(writer, 
(0xe0 | (utf >> 12))) < 0: + * return -1 + */ + __pyx_t_2 = ((__pyx_v_utf < 0x10000) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":74 + * return 0 + * elif utf < 0x10000: + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: # <<<<<<<<<<<<<< + * return -1 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + */ + __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xe0 | (__pyx_v_utf >> 12)))) < 0) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":75 + * elif utf < 0x10000: + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + * return -1 # <<<<<<<<<<<<<< + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":74 + * return 0 + * elif utf < 0x10000: + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: # <<<<<<<<<<<<<< + * return -1 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":76 + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + * return -1 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + */ + __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f)))) < 0) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":77 + * return -1 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 # <<<<<<<<<<<<<< + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif utf > 0x10FFFF: + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":76 + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + * return -1 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + */ + } + + /* "aiohttp/_http_writer.pyx":78 + * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< + * elif utf > 0x10FFFF: + * # symbol is too large + */ + __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":73 + * # surogate pair, ignored + * return 0 + * elif utf < 0x10000: # <<<<<<<<<<<<<< + * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + * return -1 + */ + } + + /* "aiohttp/_http_writer.pyx":79 + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< + * # symbol is too large + * return 0 + */ + __pyx_t_2 = ((__pyx_v_utf > 0x10FFFF) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":81 + * elif utf > 0x10FFFF: + * # symbol is too large + * return 0 # <<<<<<<<<<<<<< + * else: + * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":79 + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< + * # symbol is too large + * return 0 + */ + } + + /* "aiohttp/_http_writer.pyx":83 + * return 0 + * else: + * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: # <<<<<<<<<<<<<< + * return -1 + * if _write_byte(writer, + */ + /*else*/ { + __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xf0 | (__pyx_v_utf >> 18)))) < 0) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":84 + * else: + * if 
_write_byte(writer, (0xf0 | (utf >> 18))) < 0: + * return -1 # <<<<<<<<<<<<<< + * if _write_byte(writer, + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":83 + * return 0 + * else: + * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: # <<<<<<<<<<<<<< + * return -1 + * if _write_byte(writer, + */ + } + + /* "aiohttp/_http_writer.pyx":86 + * return -1 + * if _write_byte(writer, + * (0x80 | ((utf >> 12) & 0x3f))) < 0: # <<<<<<<<<<<<<< + * return -1 + * if _write_byte(writer, + */ + __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 12) & 0x3f)))) < 0) != 0); + + /* "aiohttp/_http_writer.pyx":85 + * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + * return -1 + * if _write_byte(writer, # <<<<<<<<<<<<<< + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + * return -1 + */ + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":87 + * if _write_byte(writer, + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + * return -1 # <<<<<<<<<<<<<< + * if _write_byte(writer, + * (0x80 | ((utf >> 6) & 0x3f))) < 0: + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":85 + * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + * return -1 + * if _write_byte(writer, # <<<<<<<<<<<<<< + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + * return -1 + */ + } + + /* "aiohttp/_http_writer.pyx":89 + * return -1 + * if _write_byte(writer, + * (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + */ + __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f)))) < 0) != 0); + + /* "aiohttp/_http_writer.pyx":88 + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + * return -1 + * if _write_byte(writer, # <<<<<<<<<<<<<< + * (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 + */ + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":90 + * if _write_byte(writer, + * (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 # <<<<<<<<<<<<<< + * return _write_byte(writer, (0x80 | (utf & 0x3f))) + * + */ + __pyx_r = -1; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":88 + * (0x80 | ((utf >> 12) & 0x3f))) < 0: + * return -1 + * if _write_byte(writer, # <<<<<<<<<<<<<< + * (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 + */ + } + + /* "aiohttp/_http_writer.pyx":91 + * (0x80 | ((utf >> 6) & 0x3f))) < 0: + * return -1 + * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); + goto __pyx_L0; + } + + /* "aiohttp/_http_writer.pyx":61 + * + * + * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< + * cdef uint64_t utf = symbol + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_writer.pyx":94 + * + * + * cdef inline int _write_str(Writer* writer, str s): # <<<<<<<<<<<<<< + * cdef Py_UCS4 ch + * for ch in s: + */ + +static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_str(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, PyObject *__pyx_v_s) { + Py_UCS4 __pyx_v_ch; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *(*__pyx_t_2)(PyObject *); + PyObject *__pyx_t_3 = NULL; + Py_UCS4 __pyx_t_4; + int __pyx_t_5; + __Pyx_RefNannySetupContext("_write_str", 0); + + /* "aiohttp/_http_writer.pyx":96 + * cdef inline int 
_write_str(Writer* writer, str s): + * cdef Py_UCS4 ch + * for ch in s: # <<<<<<<<<<<<<< + * if _write_utf8(writer, ch) < 0: + * return -1 + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_s); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 96, __pyx_L1_error) + for (;;) { + { + __pyx_t_3 = __pyx_t_2(__pyx_t_1); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 96, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __pyx_t_4 = __Pyx_PyObject_AsPy_UCS4(__pyx_t_3); if (unlikely((__pyx_t_4 == (Py_UCS4)-1) && PyErr_Occurred())) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_ch = __pyx_t_4; + + /* "aiohttp/_http_writer.pyx":97 + * cdef Py_UCS4 ch + * for ch in s: + * if _write_utf8(writer, ch) < 0: # <<<<<<<<<<<<<< + * return -1 + * + */ + __pyx_t_5 = ((__pyx_f_7aiohttp_12_http_writer__write_utf8(__pyx_v_writer, __pyx_v_ch) < 0) != 0); + if (__pyx_t_5) { + + /* "aiohttp/_http_writer.pyx":98 + * for ch in s: + * if _write_utf8(writer, ch) < 0: + * return -1 # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":97 + * cdef Py_UCS4 ch + * for ch in s: + * if _write_utf8(writer, ch) < 0: # <<<<<<<<<<<<<< + * return -1 + * + */ + } + + /* "aiohttp/_http_writer.pyx":96 + * cdef inline int _write_str(Writer* writer, str s): + * cdef Py_UCS4 ch + * for ch in s: # <<<<<<<<<<<<<< + * if _write_utf8(writer, ch) < 0: + * return -1 + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_http_writer.pyx":94 + * + * + * cdef inline int _write_str(Writer* writer, str s): # <<<<<<<<<<<<<< + * cdef Py_UCS4 ch + * for ch in s: + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_WriteUnraisable("aiohttp._http_writer._write_str", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); + __pyx_r = 0; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_writer.pyx":103 + * # --------------- _serialize_headers ---------------------- + * + * cdef str to_str(object s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + +static PyObject *__pyx_f_7aiohttp_12_http_writer_to_str(PyObject *__pyx_v_s) { + PyTypeObject *__pyx_v_typ = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("to_str", 0); + + /* "aiohttp/_http_writer.pyx":104 + * + * cdef str to_str(object s): + * typ = type(s) # <<<<<<<<<<<<<< + * if typ is str: + * return s + */ + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_s))); + __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_s))); + + /* "aiohttp/_http_writer.pyx":105 + * cdef str to_str(object s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return s + * elif typ is _istr: + */ + __pyx_t_1 = (__pyx_v_typ == (&PyString_Type)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "aiohttp/_http_writer.pyx":106 + * typ = type(s) + * if typ is str: + * return s # <<<<<<<<<<<<<< + * elif typ is _istr: + * return PyObject_Str(s) + */ + __Pyx_XDECREF(__pyx_r); + 
__Pyx_INCREF(((PyObject*)__pyx_v_s)); + __pyx_r = ((PyObject*)__pyx_v_s); + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":105 + * cdef str to_str(object s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return s + * elif typ is _istr: + */ + } + + /* "aiohttp/_http_writer.pyx":107 + * if typ is str: + * return s + * elif typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * elif not isinstance(s, str): + */ + __pyx_t_2 = (__pyx_v_typ == ((PyTypeObject*)__pyx_v_7aiohttp_12_http_writer__istr)); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "aiohttp/_http_writer.pyx":108 + * return s + * elif typ is _istr: + * return PyObject_Str(s) # <<<<<<<<<<<<<< + * elif not isinstance(s, str): + * raise TypeError("Cannot serialize non-str key {!r}".format(s)) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyObject_Str(__pyx_v_s); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 108, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "aiohttp/_http_writer.pyx":107 + * if typ is str: + * return s + * elif typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * elif not isinstance(s, str): + */ + } + + /* "aiohttp/_http_writer.pyx":109 + * elif typ is _istr: + * return PyObject_Str(s) + * elif not isinstance(s, str): # <<<<<<<<<<<<<< + * raise TypeError("Cannot serialize non-str key {!r}".format(s)) + * else: + */ + __pyx_t_1 = PyString_Check(__pyx_v_s); + __pyx_t_2 = ((!(__pyx_t_1 != 0)) != 0); + if (unlikely(__pyx_t_2)) { + + /* "aiohttp/_http_writer.pyx":110 + * return PyObject_Str(s) + * elif not isinstance(s, str): + * raise TypeError("Cannot serialize non-str key {!r}".format(s)) # <<<<<<<<<<<<<< + * else: + * return str(s) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_Cannot_serialize_non_str_key_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_v_s) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_s); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 110, __pyx_L1_error) + + /* "aiohttp/_http_writer.pyx":109 + * elif typ is _istr: + * return PyObject_Str(s) + * elif not isinstance(s, str): # <<<<<<<<<<<<<< + * raise TypeError("Cannot serialize non-str key {!r}".format(s)) + * else: + */ + } + + /* "aiohttp/_http_writer.pyx":112 + * raise TypeError("Cannot serialize non-str key {!r}".format(s)) + * else: + * return str(s) # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyString_Type)), __pyx_v_s); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (!(likely(PyString_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 112, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "aiohttp/_http_writer.pyx":103 + * # --------------- _serialize_headers ---------------------- + * + * cdef str to_str(object s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("aiohttp._http_writer.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_typ); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "aiohttp/_http_writer.pyx":115 + * + * + * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< + * cdef Writer writer + * cdef object key + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7aiohttp_12_http_writer_1_serialize_headers = {"_serialize_headers", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_status_line = 0; + PyObject *__pyx_v_headers = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_serialize_headers (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_status_line,&__pyx_n_s_headers,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, 
__pyx_n_s_status_line)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("_serialize_headers", 1, 2, 2, 1); __PYX_ERR(0, 115, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_serialize_headers") < 0)) __PYX_ERR(0, 115, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_status_line = ((PyObject*)values[0]); + __pyx_v_headers = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_serialize_headers", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 115, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._http_writer._serialize_headers", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_status_line), (&PyString_Type), 1, "status_line", 1))) __PYX_ERR(0, 115, __pyx_L1_error) + __pyx_r = __pyx_pf_7aiohttp_12_http_writer__serialize_headers(__pyx_self, __pyx_v_status_line, __pyx_v_headers); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_12_http_writer__serialize_headers(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_status_line, PyObject *__pyx_v_headers) { + struct __pyx_t_7aiohttp_12_http_writer_Writer __pyx_v_writer; + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_val = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + PyObject *(*__pyx_t_6)(PyObject *); + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *(*__pyx_t_9)(PyObject *); + int __pyx_t_10; + int __pyx_t_11; + char const *__pyx_t_12; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18 = NULL; + __Pyx_RefNannySetupContext("_serialize_headers", 0); + + /* "aiohttp/_http_writer.pyx":121 + * cdef bytes ret + * + * _init_writer(&writer) # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_f_7aiohttp_12_http_writer__init_writer((&__pyx_v_writer)); + + /* "aiohttp/_http_writer.pyx":123 + * _init_writer(&writer) + * + * try: # <<<<<<<<<<<<<< + * if _write_str(&writer, status_line) < 0: + * raise + */ + /*try:*/ { + + /* "aiohttp/_http_writer.pyx":124 + * + * try: + * if _write_str(&writer, status_line) < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\r') < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), __pyx_v_status_line) < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":125 + * try: + * if _write_str(&writer, status_line) < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, '\r') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 125, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":124 + * + * try: + * if _write_str(&writer, status_line) < 0: # <<<<<<<<<<<<<< + * raise + * if 
_write_byte(&writer, '\r') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":126 + * if _write_str(&writer, status_line) < 0: + * raise + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":127 + * raise + * if _write_byte(&writer, '\r') < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, '\n') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 127, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":126 + * if _write_str(&writer, status_line) < 0: + * raise + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":128 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":129 + * raise + * if _write_byte(&writer, '\n') < 0: + * raise # <<<<<<<<<<<<<< + * + * for key, val in headers.items(): + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 129, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":128 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + } + + /* "aiohttp/_http_writer.pyx":131 + * raise + * + * for key, val in headers.items(): # <<<<<<<<<<<<<< + * if _write_str(&writer, to_str(key)) < 0: + * raise + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_headers, __pyx_n_s_items); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_2 = (__pyx_t_4) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(PyList_CheckExact(__pyx_t_2)) || PyTuple_CheckExact(__pyx_t_2)) { + __pyx_t_3 = __pyx_t_2; __Pyx_INCREF(__pyx_t_3); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + } else { + __pyx_t_5 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 131, __pyx_L4_error) + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + for (;;) { + if (likely(!__pyx_t_6)) { + if (likely(PyList_CheckExact(__pyx_t_3))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_2); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 131, __pyx_L4_error) + #else + __pyx_t_2 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + #endif + } else { + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_2); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 131, __pyx_L4_error) + #else + __pyx_t_2 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + #endif + } + } else { + __pyx_t_2 = __pyx_t_6(__pyx_t_3); + if (unlikely(!__pyx_t_2)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 131, __pyx_L4_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_2); + } + if ((likely(PyTuple_CheckExact(__pyx_t_2))) || (PyList_CheckExact(__pyx_t_2))) { + PyObject* sequence = __pyx_t_2; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 131, __pyx_L4_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_7 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_8 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 131, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_9 = Py_TYPE(__pyx_t_8)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_9(__pyx_t_8); if (unlikely(!__pyx_t_4)) goto __pyx_L11_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_7 = __pyx_t_9(__pyx_t_8); if (unlikely(!__pyx_t_7)) goto __pyx_L11_unpacking_failed; + 
__Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_9(__pyx_t_8), 2) < 0) __PYX_ERR(0, 131, __pyx_L4_error) + __pyx_t_9 = NULL; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L12_unpacking_done; + __pyx_L11_unpacking_failed:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 131, __pyx_L4_error) + __pyx_L12_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_XDECREF_SET(__pyx_v_val, __pyx_t_7); + __pyx_t_7 = 0; + + /* "aiohttp/_http_writer.pyx":132 + * + * for key, val in headers.items(): + * if _write_str(&writer, to_str(key)) < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, ':') < 0: + */ + __pyx_t_2 = __pyx_f_7aiohttp_12_http_writer_to_str(__pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), ((PyObject*)__pyx_t_2)) < 0) != 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":133 + * for key, val in headers.items(): + * if _write_str(&writer, to_str(key)) < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, ':') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 133, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":132 + * + * for key, val in headers.items(): + * if _write_str(&writer, to_str(key)) < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, ':') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":134 + * if _write_str(&writer, to_str(key)) < 0: + * raise + * if _write_byte(&writer, ':') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, ' ') < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), ':') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":135 + * raise + * if _write_byte(&writer, ':') < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, ' ') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 135, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":134 + * if _write_str(&writer, to_str(key)) < 0: + * raise + * if _write_byte(&writer, ':') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, ' ') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":136 + * if _write_byte(&writer, ':') < 0: + * raise + * if _write_byte(&writer, ' ') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_str(&writer, to_str(val)) < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), ' ') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":137 + * raise + * if _write_byte(&writer, ' ') < 0: + * raise # <<<<<<<<<<<<<< + * if _write_str(&writer, to_str(val)) < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 137, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":136 + * if _write_byte(&writer, ':') < 0: + * raise + * if _write_byte(&writer, ' ') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_str(&writer, to_str(val)) < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":138 + * if _write_byte(&writer, ' ') < 0: + * raise + * if _write_str(&writer, to_str(val)) < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\r') < 0: + */ + __pyx_t_2 = __pyx_f_7aiohttp_12_http_writer_to_str(__pyx_v_val); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 138, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), ((PyObject*)__pyx_t_2)) < 
0) != 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":139 + * raise + * if _write_str(&writer, to_str(val)) < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, '\r') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 139, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":138 + * if _write_byte(&writer, ' ') < 0: + * raise + * if _write_str(&writer, to_str(val)) < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\r') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":140 + * if _write_str(&writer, to_str(val)) < 0: + * raise + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":141 + * raise + * if _write_byte(&writer, '\r') < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, '\n') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 141, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":140 + * if _write_str(&writer, to_str(val)) < 0: + * raise + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":142 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":143 + * raise + * if _write_byte(&writer, '\n') < 0: + * raise # <<<<<<<<<<<<<< + * + * if _write_byte(&writer, '\r') < 0: + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 143, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":142 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + } + + /* "aiohttp/_http_writer.pyx":131 + * raise + * + * for key, val in headers.items(): # <<<<<<<<<<<<<< + * if _write_str(&writer, to_str(key)) < 0: + * raise + */ + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "aiohttp/_http_writer.pyx":145 + * raise + * + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":146 + * + * if _write_byte(&writer, '\r') < 0: + * raise # <<<<<<<<<<<<<< + * if _write_byte(&writer, '\n') < 0: + * raise + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 146, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":145 + * raise + * + * if _write_byte(&writer, '\r') < 0: # <<<<<<<<<<<<<< + * raise + * if _write_byte(&writer, '\n') < 0: + */ + } + + /* "aiohttp/_http_writer.pyx":147 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); + if (unlikely(__pyx_t_1)) { + + /* "aiohttp/_http_writer.pyx":148 + * raise + * if _write_byte(&writer, '\n') < 0: + * raise # <<<<<<<<<<<<<< + * + * return PyBytes_FromStringAndSize(writer.buf, writer.pos) + */ + __Pyx_ReraiseException(); __PYX_ERR(0, 148, __pyx_L4_error) + + /* "aiohttp/_http_writer.pyx":147 + * if _write_byte(&writer, '\r') < 0: + * raise + * if _write_byte(&writer, '\n') < 0: # <<<<<<<<<<<<<< + * raise + * + */ + 
} + + /* "aiohttp/_http_writer.pyx":150 + * raise + * + * return PyBytes_FromStringAndSize(writer.buf, writer.pos) # <<<<<<<<<<<<<< + * finally: + * _release_writer(&writer) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyBytes_FromStringAndSize(__pyx_v_writer.buf, __pyx_v_writer.pos); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 150, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L3_return; + } + + /* "aiohttp/_http_writer.pyx":152 + * return PyBytes_FromStringAndSize(writer.buf, writer.pos) + * finally: + * _release_writer(&writer) # <<<<<<<<<<<<<< + */ + /*finally:*/ { + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15) < 0)) __Pyx_ErrFetch(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + __pyx_t_10 = __pyx_lineno; __pyx_t_11 = __pyx_clineno; __pyx_t_12 = __pyx_filename; + { + __pyx_f_7aiohttp_12_http_writer__release_writer((&__pyx_v_writer)); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + } + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ErrRestore(__pyx_t_13, __pyx_t_14, __pyx_t_15); + __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; + __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_11; __pyx_filename = __pyx_t_12; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_18 = __pyx_r; + __pyx_r = 0; + __pyx_f_7aiohttp_12_http_writer__release_writer((&__pyx_v_writer)); + __pyx_r = __pyx_t_18; + __pyx_t_18 = 0; + goto __pyx_L0; + } + } + + /* "aiohttp/_http_writer.pyx":115 + * + * + * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< + * cdef Writer writer + * cdef object key + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("aiohttp._http_writer._serialize_headers", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_key); + __Pyx_XDECREF(__pyx_v_val); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__http_writer(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__http_writer}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "_http_writer", + 0, /* 
m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_Cannot_serialize_non_str_key_r, __pyx_k_Cannot_serialize_non_str_key_r, sizeof(__pyx_k_Cannot_serialize_non_str_key_r), 0, 0, 1, 0}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_aiohttp__http_writer, __pyx_k_aiohttp__http_writer, sizeof(__pyx_k_aiohttp__http_writer), 0, 0, 1, 1}, + {&__pyx_kp_s_aiohttp__http_writer_pyx, __pyx_k_aiohttp__http_writer_pyx, sizeof(__pyx_k_aiohttp__http_writer_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_istr, __pyx_k_istr, sizeof(__pyx_k_istr), 0, 0, 1, 1}, + {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_multidict, __pyx_k_multidict, sizeof(__pyx_k_multidict), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_ret, __pyx_k_ret, sizeof(__pyx_k_ret), 0, 0, 1, 1}, + {&__pyx_n_s_serialize_headers, __pyx_k_serialize_headers, sizeof(__pyx_k_serialize_headers), 0, 0, 1, 1}, + {&__pyx_n_s_status_line, __pyx_k_status_line, sizeof(__pyx_k_status_line), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_val, __pyx_k_val, sizeof(__pyx_k_val), 0, 0, 1, 1}, + {&__pyx_n_s_writer, __pyx_k_writer, sizeof(__pyx_k_writer), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 110, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "aiohttp/_http_writer.pyx":115 + * + * + * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< + * cdef Writer writer + * cdef object key + */ + __pyx_tuple_ = PyTuple_Pack(6, __pyx_n_s_status_line, __pyx_n_s_headers, __pyx_n_s_writer, __pyx_n_s_key, __pyx_n_s_val, __pyx_n_s_ret); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 115, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_writer_pyx, __pyx_n_s_serialize_headers, 115, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) 
__PYX_ERR(0, 115, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __pyx_v_7aiohttp_12_http_writer__istr = Py_None; Py_INCREF(Py_None); + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif 
+#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_http_writer(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_http_writer(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__http_writer(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__http_writer(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__http_writer(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_http_writer' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__http_writer(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_http_writer", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_aiohttp___http_writer) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "aiohttp._http_writer")) { + if (unlikely(PyDict_SetItemString(modules, "aiohttp._http_writer", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "aiohttp/_http_writer.pyx":9 + * from cpython.object cimport PyObject_Str + * + * from multidict import istr # <<<<<<<<<<<<<< + * + * DEF BUF_SIZE = 16 * 1024 # 16KiB + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_istr); + __Pyx_GIVEREF(__pyx_n_s_istr); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_istr); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_multidict, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_istr); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_istr, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_writer.pyx":14 + * cdef char BUFFER[BUF_SIZE] + * + * cdef object _istr = istr # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_istr); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_writer__istr); + __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_writer__istr, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + + /* "aiohttp/_http_writer.pyx":115 + * + * + * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< + * cdef Writer writer + * cdef object key + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_writer_1_serialize_headers, NULL, __pyx_n_s_aiohttp__http_writer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 115, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_serialize_headers, __pyx_t_2) < 0) __PYX_ERR(0, 115, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "aiohttp/_http_writer.pyx":1 + * from libc.stdint cimport uint8_t, uint64_t # <<<<<<<<<<<<<< + * from libc.string cimport 
memcpy + * from cpython.exc cimport PyErr_NoMemory + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._http_writer", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._http_writer"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* WriteUnraisableException */ +static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + 
Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python 
object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = 
PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* ReRaiseException */ +static CYTHON_INLINE void __Pyx_ReraiseException(void) { + PyObject *type = NULL, *value = NULL, *tb = NULL; +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = PyThreadState_GET(); + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + type = exc_info->exc_type; + value = exc_info->exc_value; + tb = exc_info->exc_traceback; + #else + type = tstate->exc_type; + value = tstate->exc_value; + tb = tstate->exc_traceback; + #endif +#else + PyErr_GetExcInfo(&type, &value, &tb); +#endif + if (!type || type == Py_None) { +#if !CYTHON_FAST_THREAD_STATE + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(tb); +#endif + PyErr_SetString(PyExc_RuntimeError, + "No active exception to reraise"); + } else { +#if CYTHON_FAST_THREAD_STATE + Py_INCREF(type); + Py_XINCREF(value); + Py_XINCREF(tb); +#endif + PyErr_Restore(type, value, tb); + } +} + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", 
expected); +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* IterFinish */ +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if 
CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return 
__Pyx_GetBuiltinName(name); +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, 
(size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* UnicodeAsUCS4 */ +static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject* x) { + Py_ssize_t length; + #if CYTHON_PEP393_ENABLED + length = PyUnicode_GET_LENGTH(x); + if (likely(length == 1)) { + return PyUnicode_READ_CHAR(x, 0); + } + #else + length = PyUnicode_GET_SIZE(x); + if (likely(length == 1)) { + return PyUnicode_AS_UNICODE(x)[0]; + } + #if Py_UNICODE_SIZE == 2 + else if (PyUnicode_GET_SIZE(x) == 2) { + Py_UCS4 high_val = PyUnicode_AS_UNICODE(x)[0]; + if (high_val >= 0xD800 && high_val <= 0xDBFF) { + Py_UCS4 low_val = PyUnicode_AS_UNICODE(x)[1]; + if (low_val >= 0xDC00 && low_val <= 0xDFFF) { + return 0x10000 + (((high_val & ((1<<10)-1)) << 10) | (low_val & ((1<<10)-1))); + } + } + } + #endif + #endif + PyErr_Format(PyExc_ValueError, + "only single character unicode strings can be converted to Py_UCS4, " + "got length %" CYTHON_FORMAT_SSIZE_T "d", length); + return (Py_UCS4)-1; +} + +/* ObjectAsUCS4 */ +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4_raise_error(long ival) { + if (ival < 0) { + if (!PyErr_Occurred()) + PyErr_SetString(PyExc_OverflowError, + "cannot convert negative value to Py_UCS4"); + } else { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to Py_UCS4"); + } + return (Py_UCS4)-1; +} +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject* x) { + long ival; + ival = __Pyx_PyInt_As_long(x); + if (unlikely(!__Pyx_is_valid_index(ival, 1114111 + 1))) { + return __Pyx__PyObject_AsPy_UCS4_raise_error(ival); + } + return (Py_UCS4)ival; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && 
unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) 
return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + 
return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; ip) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+    return PyInt_FromSize_t(ival);
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_writer.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.cpython-37m-x86_64-linux-gnu.so
new file mode 100755
index 0000000..ba64557
Binary files /dev/null and b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.cpython-37m-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.7/site-packages/aiohttp/_http_writer.pyx b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.pyx
new file mode 100644
index 0000000..8af3776
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/aiohttp/_http_writer.pyx
@@ -0,0 +1,152 @@
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free
+
+from cpython.bytes cimport PyBytes_FromStringAndSize
+from cpython.object cimport PyObject_Str
+
+from multidict import istr
+
+DEF BUF_SIZE = 16 * 1024  # 16KiB
+cdef char BUFFER[BUF_SIZE]
+
+cdef object _istr = istr
+
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+    char *buf
+    Py_ssize_t size
+    Py_ssize_t pos
+
+
+cdef inline void _init_writer(Writer* writer):
+    writer.buf = &BUFFER[0]
+    writer.size = BUF_SIZE
+    writer.pos = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+    if writer.buf != BUFFER:
+        PyMem_Free(writer.buf)
+
+
+cdef inline int _write_byte(Writer* writer, uint8_t ch):
+    cdef char * buf
+    cdef Py_ssize_t size
+
+    if writer.pos == writer.size:
+        # reallocate
+        size = writer.size + BUF_SIZE
+        if writer.buf == BUFFER:
+            buf = <char*>PyMem_Malloc(size)
+            if buf == NULL:
+                PyErr_NoMemory()
+                return -1
+            memcpy(buf, writer.buf, writer.size)
+        else:
+            buf = <char*>PyMem_Realloc(writer.buf, size)
+            if buf == NULL:
+                PyErr_NoMemory()
+                return -1
+        writer.buf = buf
+        writer.size = size
+    writer.buf[writer.pos] = ch
+    writer.pos += 1
+    return 0
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+    cdef uint64_t utf = <uint64_t>symbol
+
+    if utf < 0x80:
+        return _write_byte(writer, <uint8_t>utf)
+    elif utf < 0x800:
+        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
+            return -1
+        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+    elif 0xD800 <= utf <= 0xDFFF:
+        # surrogate pair, ignored
+        return 0
+    elif utf < 0x10000:
+        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
+            return -1
+        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+            return -1
+        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+    elif utf > 0x10FFFF:
+        # symbol is too large
+        return 0
+    else:
+        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
+            return -1
+        if _write_byte(writer,
+                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
+            return -1
+        if _write_byte(writer,
+                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+            return -1
+        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+
+
+cdef inline int _write_str(Writer* writer, str s):
+    cdef Py_UCS4 ch
+    for ch in s:
+        if _write_utf8(writer, ch) < 0:
+            return -1
+
+
+# --------------- _serialize_headers ----------------------
+
+cdef str to_str(object s):
+    typ = type(s)
+    if typ is str:
+        return s
+    elif typ is _istr:
+        return PyObject_Str(s)
+    elif not isinstance(s, str):
+        raise TypeError("Cannot serialize non-str key {!r}".format(s))
+    else:
+        return str(s)
+
+
+def _serialize_headers(str status_line, headers):
+    cdef Writer writer
+    cdef object key
+    cdef object val
+    cdef bytes
ret + + _init_writer(&writer) + + try: + if _write_str(&writer, status_line) < 0: + raise + if _write_byte(&writer, '\r') < 0: + raise + if _write_byte(&writer, '\n') < 0: + raise + + for key, val in headers.items(): + if _write_str(&writer, to_str(key)) < 0: + raise + if _write_byte(&writer, ':') < 0: + raise + if _write_byte(&writer, ' ') < 0: + raise + if _write_str(&writer, to_str(val)) < 0: + raise + if _write_byte(&writer, '\r') < 0: + raise + if _write_byte(&writer, '\n') < 0: + raise + + if _write_byte(&writer, '\r') < 0: + raise + if _write_byte(&writer, '\n') < 0: + raise + + return PyBytes_FromStringAndSize(writer.buf, writer.pos) + finally: + _release_writer(&writer) diff --git a/venv/lib/python3.7/site-packages/aiohttp/_websocket.c b/venv/lib/python3.7/site-packages/aiohttp/_websocket.c new file mode 100644 index 0000000..99bc0ea --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_websocket.c @@ -0,0 +1,3538 @@ +/* Generated by Cython 0.29.2 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "name": "aiohttp._websocket", + "sources": [ + "aiohttp/_websocket.pyx" + ] + }, + "module_name": "aiohttp._websocket" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. +#else +#define CYTHON_ABI "0_29_2" +#define CYTHON_HEX_VERSION 0x001D02F0 +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef 
CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL 
(CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, 
c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_USE_DICT_VERSIONS +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ + } +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int 
PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + 
#ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__aiohttp___websocket +#define __PYX_HAVE_API__aiohttp___websocket +/* Early includes */ +#include <string.h> +#include <stdio.h> +#include "pythread.h" +#include <stdint.h> +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define
__Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? 
__Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) 
&& (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "aiohttp/_websocket.pyx", + "type.pxd", + "bool.pxd", + "complex.pxd", +}; + +/*--- Type declarations ---*/ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r 
= NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define 
__Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cpython.version' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.module' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'cpython.tuple' */ + +/* Module declarations from 'cpython.list' */ + +/* Module declarations from 'cpython.sequence' */ + +/* Module declarations from 'cpython.mapping' */ + +/* Module declarations from 'cpython.iterator' */ + +/* Module declarations from 'cpython.number' */ + +/* Module declarations from 'cpython.int' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.bool' */ +static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; + +/* Module declarations from 'cpython.long' */ + +/* Module declarations from 'cpython.float' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.complex' */ +static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; + +/* Module declarations from 'cpython.string' */ + +/* Module declarations from 'cpython.unicode' */ + +/* Module declarations from 'cpython.dict' */ + +/* Module declarations from 
'cpython.instance' */ + +/* Module declarations from 'cpython.function' */ + +/* Module declarations from 'cpython.method' */ + +/* Module declarations from 'cpython.weakref' */ + +/* Module declarations from 'cpython.getargs' */ + +/* Module declarations from 'cpython.pythread' */ + +/* Module declarations from 'cpython.pystate' */ + +/* Module declarations from 'cpython.cobject' */ + +/* Module declarations from 'cpython.oldbuffer' */ + +/* Module declarations from 'cpython.set' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'cpython.pycapsule' */ + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'aiohttp._websocket' */ +#define __Pyx_MODULE_NAME "aiohttp._websocket" +extern int __pyx_module_is_main_aiohttp___websocket; +int __pyx_module_is_main_aiohttp___websocket = 0; + +/* Implementation of 'aiohttp._websocket' */ +static PyObject *__pyx_builtin_range; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_data[] = "data"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_mask[] = "mask"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_in_buf[] = "in_buf"; +static const char __pyx_k_data_len[] = "data_len"; +static const char __pyx_k_mask_buf[] = "mask_buf"; +static const char __pyx_k_uint32_msk[] = "uint32_msk"; +static const char __pyx_k_uint64_msk[] = "uint64_msk"; +static const char __pyx_k_aiohttp__websocket[] = "aiohttp._websocket"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_websocket_mask_cython[] = "_websocket_mask_cython"; +static const char __pyx_k_aiohttp__websocket_pyx[] = "aiohttp/_websocket.pyx"; +static PyObject *__pyx_n_s_aiohttp__websocket; +static PyObject *__pyx_kp_s_aiohttp__websocket_pyx; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_data; +static PyObject *__pyx_n_s_data_len; +static PyObject *__pyx_n_s_i; +static PyObject *__pyx_n_s_in_buf; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_mask; +static PyObject *__pyx_n_s_mask_buf; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_uint32_msk; +static PyObject *__pyx_n_s_uint64_msk; +static PyObject *__pyx_n_s_websocket_mask_cython; +static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data); /* proto */ +static PyObject *__pyx_tuple_; +static PyObject *__pyx_codeobj__2; +/* Late includes */ + +/* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< + * """Note, this function mutates its `data` argument + * """ + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython[] = "Note, this function mutates its `data` argument\n "; +static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"_websocket_mask_cython", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython, 
METH_VARARGS|METH_KEYWORDS, __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython}; +static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_mask = 0; + PyObject *__pyx_v_data = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_mask,&__pyx_n_s_data,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_mask)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_mask = values[0]; + __pyx_v_data = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(__pyx_self, __pyx_v_mask, __pyx_v_data); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data) { + Py_ssize_t __pyx_v_data_len; + Py_ssize_t __pyx_v_i; + unsigned char *__pyx_v_in_buf; + unsigned char const *__pyx_v_mask_buf; + uint32_t __pyx_v_uint32_msk; + uint64_t __pyx_v_uint64_msk; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_ssize_t __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + char *__pyx_t_5; + uint64_t *__pyx_t_6; + long __pyx_t_7; + uint32_t *__pyx_t_8; + Py_ssize_t __pyx_t_9; + Py_ssize_t __pyx_t_10; + Py_ssize_t __pyx_t_11; + __Pyx_RefNannySetupContext("_websocket_mask_cython", 0); + __Pyx_INCREF(__pyx_v_mask); + __Pyx_INCREF(__pyx_v_data); + + /* "aiohttp/_websocket.pyx":20 + * uint64_t uint64_msk + * + * assert len(mask) == 4 # <<<<<<<<<<<<<< + * + * if not isinstance(mask, bytes): + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + __pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 20, __pyx_L1_error) + if (unlikely(!((__pyx_t_1 == 4) != 0))) { + 
PyErr_SetNone(PyExc_AssertionError); + __PYX_ERR(0, 20, __pyx_L1_error) + } + } + #endif + + /* "aiohttp/_websocket.pyx":22 + * assert len(mask) == 4 + * + * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< + * mask = bytes(mask) + * + */ + __pyx_t_2 = PyBytes_Check(__pyx_v_mask); + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "aiohttp/_websocket.pyx":23 + * + * if not isinstance(mask, bytes): + * mask = bytes(mask) # <<<<<<<<<<<<<< + * + * if isinstance(data, bytearray): + */ + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_mask, __pyx_t_4); + __pyx_t_4 = 0; + + /* "aiohttp/_websocket.pyx":22 + * assert len(mask) == 4 + * + * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< + * mask = bytes(mask) + * + */ + } + + /* "aiohttp/_websocket.pyx":25 + * mask = bytes(mask) + * + * if isinstance(data, bytearray): # <<<<<<<<<<<<<< + * data = data + * else: + */ + __pyx_t_3 = PyByteArray_Check(__pyx_v_data); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "aiohttp/_websocket.pyx":26 + * + * if isinstance(data, bytearray): + * data = data # <<<<<<<<<<<<<< + * else: + * data = bytearray(data) + */ + __pyx_t_4 = __pyx_v_data; + __Pyx_INCREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4); + __pyx_t_4 = 0; + + /* "aiohttp/_websocket.pyx":25 + * mask = bytes(mask) + * + * if isinstance(data, bytearray): # <<<<<<<<<<<<<< + * data = data + * else: + */ + goto __pyx_L4; + } + + /* "aiohttp/_websocket.pyx":28 + * data = data + * else: + * data = bytearray(data) # <<<<<<<<<<<<<< + * + * data_len = len(data) + */ + /*else*/ { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4); + __pyx_t_4 = 0; + } + __pyx_L4:; + + /* "aiohttp/_websocket.pyx":30 + * data = bytearray(data) + * + * data_len = len(data) # <<<<<<<<<<<<<< + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) + */ + __pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 30, __pyx_L1_error) + __pyx_v_data_len = __pyx_t_1; + + /* "aiohttp/_websocket.pyx":31 + * + * data_len = len(data) + * in_buf = PyByteArray_AsString(data) # <<<<<<<<<<<<<< + * mask_buf = PyBytes_AsString(mask) + * uint32_msk = (mask_buf)[0] + */ + if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 31, __pyx_L1_error) + __pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 31, __pyx_L1_error) + __pyx_v_in_buf = ((unsigned char *)__pyx_t_5); + + /* "aiohttp/_websocket.pyx":32 + * data_len = len(data) + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) # <<<<<<<<<<<<<< + * uint32_msk = (mask_buf)[0] + * + */ + __pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 32, __pyx_L1_error) + __pyx_v_mask_buf = ((unsigned char const *)__pyx_t_5); + + /* "aiohttp/_websocket.pyx":33 + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) + * uint32_msk = (mask_buf)[0] # <<<<<<<<<<<<<< + * + * # TODO: align in_data ptr to achieve even faster speeds + */ + 
__pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]); + + /* "aiohttp/_websocket.pyx":38 + * # does it need in python ?! malloc() always aligns to sizeof(long) bytes + * + * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk + */ + __pyx_t_2 = (((sizeof(size_t)) >= 8) != 0); + if (__pyx_t_2) { + + /* "aiohttp/_websocket.pyx":39 + * + * if sizeof(size_t) >= 8: + * uint64_msk = uint32_msk # <<<<<<<<<<<<<< + * uint64_msk = (uint64_msk << 32) | uint32_msk + * + */ + __pyx_v_uint64_msk = __pyx_v_uint32_msk; + + /* "aiohttp/_websocket.pyx":40 + * if sizeof(size_t) >= 8: + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<< + * + * while data_len >= 8: + */ + __pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk); + + /* "aiohttp/_websocket.pyx":42 + * uint64_msk = (uint64_msk << 32) | uint32_msk + * + * while data_len >= 8: # <<<<<<<<<<<<<< + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 + */ + while (1) { + __pyx_t_2 = ((__pyx_v_data_len >= 8) != 0); + if (!__pyx_t_2) break; + + /* "aiohttp/_websocket.pyx":43 + * + * while data_len >= 8: + * (in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<< + * in_buf += 8 + * data_len -= 8 + */ + __pyx_t_6 = ((uint64_t *)__pyx_v_in_buf); + __pyx_t_7 = 0; + (__pyx_t_6[__pyx_t_7]) = ((__pyx_t_6[__pyx_t_7]) ^ __pyx_v_uint64_msk); + + /* "aiohttp/_websocket.pyx":44 + * while data_len >= 8: + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 # <<<<<<<<<<<<<< + * data_len -= 8 + * + */ + __pyx_v_in_buf = (__pyx_v_in_buf + 8); + + /* "aiohttp/_websocket.pyx":45 + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 + * data_len -= 8 # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_data_len = (__pyx_v_data_len - 8); + } + + /* "aiohttp/_websocket.pyx":38 + * # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes + * + * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk + */ + } + + /* "aiohttp/_websocket.pyx":48 + * + * + * while data_len >= 4: # <<<<<<<<<<<<<< + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 + */ + while (1) { + __pyx_t_2 = ((__pyx_v_data_len >= 4) != 0); + if (!__pyx_t_2) break; + + /* "aiohttp/_websocket.pyx":49 + * + * while data_len >= 4: + * (in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<< + * in_buf += 4 + * data_len -= 4 + */ + __pyx_t_8 = ((uint32_t *)__pyx_v_in_buf); + __pyx_t_7 = 0; + (__pyx_t_8[__pyx_t_7]) = ((__pyx_t_8[__pyx_t_7]) ^ __pyx_v_uint32_msk); + + /* "aiohttp/_websocket.pyx":50 + * while data_len >= 4: + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 # <<<<<<<<<<<<<< + * data_len -= 4 + * + */ + __pyx_v_in_buf = (__pyx_v_in_buf + 4); + + /* "aiohttp/_websocket.pyx":51 + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 + * data_len -= 4 # <<<<<<<<<<<<<< + * + * for i in range(0, data_len): + */ + __pyx_v_data_len = (__pyx_v_data_len - 4); + } + + /* "aiohttp/_websocket.pyx":53 + * data_len -= 4 + * + * for i in range(0, data_len): # <<<<<<<<<<<<<< + * in_buf[i] ^= mask_buf[i] + */ + __pyx_t_1 = __pyx_v_data_len; + __pyx_t_9 = __pyx_t_1; + for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_9; __pyx_t_10+=1) { + __pyx_v_i = __pyx_t_10; + + /* "aiohttp/_websocket.pyx":54 + * + * for i in range(0, data_len): + * in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<< + */ + __pyx_t_11 = __pyx_v_i; + (__pyx_v_in_buf[__pyx_t_11]) = ((__pyx_v_in_buf[__pyx_t_11]) ^ (__pyx_v_mask_buf[__pyx_v_i])); + } + + /* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< + * """Note, this function mutates its `data` argument + * """ + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_mask); + __Pyx_XDECREF(__pyx_v_data); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__websocket(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__websocket}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "_websocket", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_aiohttp__websocket, __pyx_k_aiohttp__websocket, 
sizeof(__pyx_k_aiohttp__websocket), 0, 0, 1, 1}, + {&__pyx_kp_s_aiohttp__websocket_pyx, __pyx_k_aiohttp__websocket_pyx, sizeof(__pyx_k_aiohttp__websocket_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, + {&__pyx_n_s_data_len, __pyx_k_data_len, sizeof(__pyx_k_data_len), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_in_buf, __pyx_k_in_buf, sizeof(__pyx_k_in_buf), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_mask, __pyx_k_mask, sizeof(__pyx_k_mask), 0, 0, 1, 1}, + {&__pyx_n_s_mask_buf, __pyx_k_mask_buf, sizeof(__pyx_k_mask_buf), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_uint32_msk, __pyx_k_uint32_msk, sizeof(__pyx_k_uint32_msk), 0, 0, 1, 1}, + {&__pyx_n_s_uint64_msk, __pyx_k_uint64_msk, sizeof(__pyx_k_uint64_msk), 0, 0, 1, 1}, + {&__pyx_n_s_websocket_mask_cython, __pyx_k_websocket_mask_cython, sizeof(__pyx_k_websocket_mask_cython), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 53, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< + * """Note, this function mutates its `data` argument + * """ + */ + __pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(2, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(3, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_websocket(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_websocket(void) +#else +__Pyx_PyMODINIT_FUNC 
PyInit__websocket(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__websocket(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_websocket' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__websocket(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_websocket", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_aiohttp___websocket) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "aiohttp._websocket")) { + if (unlikely(PyDict_SetItemString(modules, "aiohttp._websocket", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< + * """Note, this function mutates its `data` argument + * """ + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_websocket.pyx":1 + * from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<< + * + * #from cpython cimport PyByteArray_AsString # cython still not exports that + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._websocket", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._websocket"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + 
} +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto 
raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: 
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = 
__Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } 
else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { 
+ int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; ip) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + 
char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_websocket.cpython-37m-x86_64-linux-gnu.so b/venv/lib/python3.7/site-packages/aiohttp/_websocket.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..dd77cdb Binary files /dev/null and b/venv/lib/python3.7/site-packages/aiohttp/_websocket.cpython-37m-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.7/site-packages/aiohttp/_websocket.pyx b/venv/lib/python3.7/site-packages/aiohttp/_websocket.pyx new file mode 100644 index 0000000..e4d992a --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/_websocket.pyx @@ -0,0 +1,54 @@ +from cpython cimport PyBytes_AsString + +#from cpython cimport PyByteArray_AsString # cython still not exports that +cdef extern from "Python.h": + char* PyByteArray_AsString(bytearray ba) except NULL + +from libc.stdint cimport uint32_t, uint64_t, uintmax_t + +def _websocket_mask_cython(object mask, object data): + """Note, this function mutates its `data` argument + """ + cdef: + Py_ssize_t data_len, i + # bit operations on signed integers are implementation-specific + unsigned char * in_buf + const unsigned char * mask_buf + uint32_t uint32_msk + uint64_t uint64_msk + + assert len(mask) == 4 + + if not isinstance(mask, bytes): + mask = bytes(mask) + + if isinstance(data, bytearray): + data = <bytearray>data + else: + data = bytearray(data) + + data_len = len(data) + in_buf = <unsigned char*>PyByteArray_AsString(data) + mask_buf = <const unsigned char*>PyBytes_AsString(mask) + uint32_msk = (<uint32_t*>mask_buf)[0] + + # TODO: align in_data ptr to achieve even faster speeds + # does it need in python ?! malloc() always aligns to sizeof(long) bytes + + if sizeof(size_t) >= 8: + uint64_msk = uint32_msk + uint64_msk = (uint64_msk << 32) | uint32_msk + + while data_len >= 8: + (<uint64_t*>in_buf)[0] ^= uint64_msk + in_buf += 8 + data_len -= 8 + + + while data_len >= 4: + (<uint32_t*>in_buf)[0] ^= uint32_msk + in_buf += 4 + data_len -= 4 + + for i in range(0, data_len): + in_buf[i] ^= mask_buf[i] diff --git a/venv/lib/python3.7/site-packages/aiohttp/abc.py b/venv/lib/python3.7/site-packages/aiohttp/abc.py new file mode 100644 index 0000000..58817c0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/abc.py @@ -0,0 +1,208 @@ +import asyncio +import logging +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel # noqa +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, +) + +from multidict import CIMultiDict # noqa +from yarl import URL + +from .helpers import get_running_loop +from .typedefs import LooseCookies + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest, Request + from .web_response import StreamResponse + from .web_app import Application + from .web_exceptions import HTTPException +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for sake of backward compatibility, + but if the router wants to be aware of the application + it can override this.
+ """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> 'AbstractMatchInfo': + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler(self) -> Callable[[Request], Awaitable[None]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + Top level application is left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden. + + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve(self, host: str, + port: int, family: int) -> List[Dict[str, Any]]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: # pragma: no cover + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + self._loop = get_running_loop(loop) + + @abstractmethod + def clear(self) -> None: + """Clear all cookies.""" + + @abstractmethod + def update_cookies(self, + cookies: LooseCookies, + response_url: URL=URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]': + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size = 0 + output_size = 0 + length = 0 # type: Optional[int] + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes=b'') -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression(self, encoding: str='deflate') -> None: + """Enable HTTP body compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers(self, status_line: str, + headers: 'CIMultiDict[str]') -> None: + """Write HTTP headers""" + + +class 
AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, + request: BaseRequest, + response: StreamResponse, + time: float) -> None: + """Emit log to logger.""" diff --git a/venv/lib/python3.7/site-packages/aiohttp/base_protocol.py b/venv/lib/python3.7/site-packages/aiohttp/base_protocol.py new file mode 100644 index 0000000..cf4f9da --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,81 @@ +import asyncio +from typing import Optional, cast + +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ('_loop', '_paused', '_drain_waiter', + '_connection_lost', 'transport') + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop # type: asyncio.AbstractEventLoop + self._paused = False + self._drain_waiter = None # type: Optional[asyncio.Future[None]] + self._connection_lost = False + self._reading_paused = False + + self.transport = None # type: Optional[asyncio.Transport] + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._connection_lost = True + # Wake up the writer if currently paused. + self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + async def _drain_helper(self) -> None: + if self._connection_lost: + raise ConnectionResetError('Connection lost') + if not self._paused: + return + waiter = self._drain_waiter + assert waiter is None or waiter.cancelled() + waiter = self._loop.create_future() + self._drain_waiter = waiter + await waiter diff --git a/venv/lib/python3.7/site-packages/aiohttp/client.py b/venv/lib/python3.7/site-packages/aiohttp/client.py new file mode 100644 index 0000000..c79823e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/client.py @@ -0,0 +1,1139 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from types import SimpleNamespace, TracebackType +from typing import ( # noqa + Any, + Coroutine, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . 
import hdrs, http, payload +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ContentTypeError, + InvalidURL, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + TooManyRedirects, + WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest, + ClientResponse, + Fingerprint, + RequestInfo, + _merge_ssl_params, +) +from .client_ws import ClientWebSocketResponse +from .connector import BaseConnector, TCPConnector, UnixConnector +from .cookiejar import CookieJar +from .helpers import ( + DEBUG, + PY_36, + BasicAuth, + CeilTimeout, + TimeoutHandle, + get_running_loop, + proxies_from_env, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import ( # noqa + WSHandshakeError, + WSMessage, + ws_ext_gen, + ws_ext_parse, +) +from .streams import FlowControlDataQueue +from .tracing import Trace, TraceConfig +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL + +__all__ = ( + # client_exceptions + 'ClientConnectionError', + 'ClientConnectorCertificateError', + 'ClientConnectorError', + 'ClientConnectorSSLError', + 'ClientError', + 'ClientHttpProxyError', + 'ClientOSError', + 'ClientPayloadError', + 'ClientProxyConnectionError', + 'ClientResponseError', + 'ClientSSLError', + 'ContentTypeError', + 'InvalidURL', + 'ServerConnectionError', + 'ServerDisconnectedError', + 'ServerFingerprintMismatch', + 'ServerTimeoutError', + 'TooManyRedirects', + 'WSServerHandshakeError', + # client_reqrep + 'ClientRequest', + 'ClientResponse', + 'Fingerprint', + 'RequestInfo', + # connector + 'BaseConnector', + 'TCPConnector', + 'UnixConnector', + # client_ws + 'ClientWebSocketResponse', + # client + 'ClientSession', + 'ClientTimeout', + 'request') + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = object # type: ignore + + +@attr.s(frozen=True, slots=True) +class ClientTimeout: + total = attr.ib(type=Optional[float], default=None) + connect = attr.ib(type=Optional[float], default=None) + sock_read = attr.ib(type=Optional[float], default=None) + sock_connect = attr.ib(type=Optional[float], default=None) + + # pool_queue_timeout = attr.ib(type=float, default=None) + # dns_resolution_timeout = attr.ib(type=float, default=None) + # socket_connect_timeout = attr.ib(type=float, default=None) + # connection_acquiring_timeout = attr.ib(type=float, default=None) + # new_connection_timeout = attr.ib(type=float, default=None) + # http_header_timeout = attr.ib(type=float, default=None) + # response_body_timeout = attr.ib(type=float, default=None) + + # to create a timeout specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT = ClientTimeout(total=5*60) + +_RetType = TypeVar('_RetType') + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset([ + '_source_traceback', '_connector', + 'requote_redirect_url', '_loop', '_cookie_jar', + '_connector_owner', '_default_auth', + '_version', '_json_serialize', + 
'_requote_redirect_url', + '_timeout', '_raise_for_status', '_auto_decompress', + '_trust_env', '_default_headers', '_skip_auto_headers', + '_request_class', '_response_class', + '_ws_response_class', '_trace_configs']) + + _source_traceback = None + _connector = None + + def __init__(self, *, connector: Optional[BaseConnector]=None, + loop: Optional[asyncio.AbstractEventLoop]=None, + cookies: Optional[LooseCookies]=None, + headers: Optional[LooseHeaders]=None, + skip_auto_headers: Optional[Iterable[str]]=None, + auth: Optional[BasicAuth]=None, + json_serialize: JSONEncoder=json.dumps, + request_class: Type[ClientRequest]=ClientRequest, + response_class: Type[ClientResponse]=ClientResponse, + ws_response_class: Type[ClientWebSocketResponse]=ClientWebSocketResponse, # noqa + version: HttpVersion=http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar]=None, + connector_owner: bool=True, + raise_for_status: bool=False, + read_timeout: Union[float, object]=sentinel, + conn_timeout: Optional[float]=None, + timeout: Union[object, ClientTimeout]=sentinel, + auto_decompress: bool=True, + trust_env: bool=False, + requote_redirect_url: bool=True, + trace_configs: Optional[List[TraceConfig]]=None) -> None: + + if loop is None: + if connector is not None: + loop = connector._loop + + loop = get_running_loop(loop) + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError( + "Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector # type: BaseConnector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + if timeout is sentinel: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn("read_timeout is deprecated, " + "use timeout argument instead", + DeprecationWarning, + stacklevel=2) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, + connect=conn_timeout) + warnings.warn("conn_timeout is deprecated, " + "use timeout argument instead", + DeprecationWarning, + stacklevel=2) + else: + self._timeout = timeout # type: ignore + if read_timeout is not sentinel: + raise ValueError("read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read") + if conn_timeout is not None: + raise ValueError("conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect") + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + + # Convert to list of tuples + if headers: + headers = CIMultiDict(headers) + else: + headers = CIMultiDict() + self._default_headers = headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset([istr(i) + for i in skip_auto_headers]) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + def __init_subclass__(cls: 
Type['ClientSession']) -> None: + warnings.warn("Inheritance class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2) + + if DEBUG: + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn("Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any=warnings) -> None: + if not self.closed: + if PY_36: + kwargs = {'source': self} + else: + kwargs = {} + _warnings.warn("Unclosed client session {!r}".format(self), + ResourceWarning, + **kwargs) + context = {'client_session': self, + 'message': 'Unclosed client session'} + if self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def request(self, + method: str, + url: StrOrURL, + **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, *, + params: Optional[Mapping[str, str]]=None, + data: Any=None, + json: Any=None, + cookies: Optional[LooseCookies]=None, + headers: LooseHeaders=None, + skip_auto_headers: Optional[Iterable[str]]=None, + auth: Optional[BasicAuth]=None, + allow_redirects: bool=True, + max_redirects: int=10, + compress: Optional[str]=None, + chunked: Optional[bool]=None, + expect100: bool=False, + raise_for_status: Optional[bool]=None, + read_until_eof: bool=True, + proxy: Optional[StrOrURL]=None, + proxy_auth: Optional[BasicAuth]=None, + timeout: Union[ClientTimeout, object]=sentinel, + verify_ssl: Optional[bool]=None, + fingerprint: Optional[bytes]=None, + ssl_context: Optional[SSLContext]=None, + ssl: Optional[Union[SSLContext, bool, Fingerprint]]=None, + proxy_headers: Optional[LooseHeaders]=None, + trace_request_ctx: Optional[SimpleNamespace]=None + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. 
+ + if self.closed: + raise RuntimeError('Session is closed') + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + 'data and json parameters can not be used at the same time') + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn( + 'Chunk size is deprecated #1615', DeprecationWarning) + + redirects = 0 + history = [] + version = self._version + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + proxy_headers = self._prepare_headers(proxy_headers) + + try: + url = URL(str_or_url) + except ValueError: + raise InvalidURL(str_or_url) + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + if proxy is not None: + try: + proxy = URL(proxy) + except ValueError: + raise InvalidURL(proxy) + + if timeout is sentinel: + real_timeout = self._timeout # type: ClientTimeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) # type: ignore + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consuming) + tm = TimeoutHandle(self._loop, real_timeout.total) + handle = tm.start() + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx( + trace_request_ctx=trace_request_ctx) + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start( + method, + url, + headers + ) + + timer = tm.timer() + try: + with timer: + while True: + url, auth_from_url = strip_auth_from_url(url) + if auth and auth_from_url: + raise ValueError("Cannot combine AUTH argument with " + "credentials encoded in URL") + + if auth is None: + auth = auth_from_url + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if (headers is not None and + auth is not None and + hdrs.AUTHORIZATION in headers): + raise ValueError("Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL") + + session_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + session_cookies.load(req_cookies) + + cookies = session_cookies + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + for scheme, proxy_info in proxies_from_env().items(): + if scheme == url.scheme: + proxy = proxy_info.proxy + proxy_auth = proxy_info.proxy_auth + break + + req = self._request_class( + method, url, params=params, headers=headers, + skip_auto_headers=skip_headers, data=data, + cookies=cookies, auth=auth, version=version, + compress=compress, chunked=chunked, + expect100=expect100, loop=self._loop, + response_class=self._response_class, + proxy=proxy, proxy_auth=proxy_auth, timer=timer, + session=self, + ssl=ssl, proxy_headers=proxy_headers, traces=traces) + + # connection timeout + try: + with CeilTimeout(real_timeout.connect, + loop=self._loop): + assert self._connector is not None + conn = await self._connector.connect( + req, + traces=traces, + timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ServerTimeoutError( + 'Connection timeout ' + 
'to host {0}'.format(url)) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method.upper() == 'HEAD', + read_until_eof=read_until_eof, + auto_decompress=self._auto_decompress, + read_timeout=real_timeout.sock_read) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except ClientError: + raise + except OSError as exc: + raise ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in ( + 301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, + url, + headers, + resp + ) + + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history)) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and + resp.method != hdrs.METH_HEAD) \ + or (resp.status in (301, 302) and + resp.method == hdrs.METH_POST): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = (resp.headers.get(hdrs.LOCATION) or + resp.headers.get(hdrs.URI)) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + r_url = URL( + r_url, encoded=not self._requote_redirect_url) + + except ValueError: + raise InvalidURL(r_url) + + scheme = r_url.scheme + if scheme not in ('http', 'https', ''): + resp.close() + raise ValueError( + 'Can redirect only to http or https') + elif not scheme: + r_url = url.join(r_url) + + if url.origin() != r_url.origin(): + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = r_url + params = None + resp.release() + continue + + break + + # check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + if raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, + url, + headers, + resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, + url, + headers, + e + ) + raise + + def ws_connect( + self, + url: StrOrURL, *, + method: str=hdrs.METH_GET, + protocols: Iterable[str]=(), + timeout: float=10.0, + receive_timeout: Optional[float]=None, + autoclose: bool=True, + autoping: bool=True, + heartbeat: Optional[float]=None, + auth: Optional[BasicAuth]=None, + origin: Optional[str]=None, + headers: Optional[LooseHeaders]=None, + proxy: Optional[StrOrURL]=None, + proxy_auth: Optional[BasicAuth]=None, + ssl: Union[SSLContext, bool, None, Fingerprint]=None, + verify_ssl: Optional[bool]=None, + fingerprint: Optional[bytes]=None, + ssl_context: Optional[SSLContext]=None, + proxy_headers: Optional[LooseHeaders]=None, + compress: int=0, + max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': + """Initiate websocket connection.""" + return _WSRequestContextManager( + 
self._ws_connect(url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + heartbeat=heartbeat, + auth=auth, + origin=origin, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size)) + + async def _ws_connect( + self, + url: StrOrURL, *, + method: str=hdrs.METH_GET, + protocols: Iterable[str]=(), + timeout: float=10.0, + receive_timeout: Optional[float]=None, + autoclose: bool=True, + autoping: bool=True, + heartbeat: Optional[float]=None, + auth: Optional[BasicAuth]=None, + origin: Optional[str]=None, + headers: Optional[LooseHeaders]=None, + proxy: Optional[StrOrURL]=None, + proxy_auth: Optional[BasicAuth]=None, + ssl: Union[SSLContext, bool, None, Fingerprint]=None, + verify_ssl: Optional[bool]=None, + fingerprint: Optional[bytes]=None, + ssl_context: Optional[SSLContext]=None, + proxy_headers: Optional[LooseHeaders]=None, + compress: int=0, + max_msg_size: int=4*1024*1024 + ) -> ClientWebSocketResponse: + + if headers is None: + real_headers = CIMultiDict() # type: CIMultiDict[str] + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: hdrs.WEBSOCKET, + hdrs.CONNECTION: hdrs.UPGRADE, + hdrs.SEC_WEBSOCKET_VERSION: '13', + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols) + if origin is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request(method, url, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + proxy_headers=proxy_headers) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message='Invalid response status', + status=resp.status, + headers=resp.headers) + + if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket': + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message='Invalid upgrade header', + status=resp.status, + headers=resp.headers) + + if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade': + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message='Invalid connection header', + status=resp.status, + headers=resp.headers) + + # key calculation + key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '') + match = base64.b64encode( + hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message='Invalid challenge response', + status=resp.status, + headers=resp.headers) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() for proto in + resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = 
resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers) + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + proto = conn.protocol + assert proto is not None + transport = conn.transport + assert transport is not None + reader = FlowControlDataQueue( + proto, limit=2 ** 16, loop=self._loop) # type: FlowControlDataQueue[WSMessage] # noqa + proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + proto, transport, use_mask=True, + compress=compress, notakeover=notakeover) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class(reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop, + receive_timeout=receive_timeout, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover) + + def _prepare_headers( + self, + headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': + """ Add default headers and transform it to CIMultiDict + """ + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names = set() # type: Set[str] + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + def get(self, url: StrOrURL, *, allow_redirects: bool=True, + **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request(hdrs.METH_GET, url, + allow_redirects=allow_redirects, + **kwargs)) + + def options(self, url: StrOrURL, *, allow_redirects: bool=True, + **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, url, + allow_redirects=allow_redirects, + **kwargs)) + + def head(self, url: StrOrURL, *, allow_redirects: bool=False, + **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request(hdrs.METH_HEAD, url, + allow_redirects=allow_redirects, + **kwargs)) + + def post(self, url: StrOrURL, + *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, + data=data, + **kwargs)) + + def put(self, url: StrOrURL, + *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, + data=data, + **kwargs)) + + def patch(self, url: StrOrURL, + *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, + data=data, + **kwargs)) + + def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, + **kwargs)) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. 
+ """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. + """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn("session.requote_redirect_url modification " + "is deprecated #2778", + DeprecationWarning, + stacklevel=2) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn("client.loop property is deprecated", + DeprecationWarning, + stacklevel=2) + return self._loop + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. + """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__(self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> 'ClientSession': + return self + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, + Any, + _RetType], + Generic[_RetType]): + + __slots__ = ('_coro', '_resp') + + def __init__( + self, + coro: Coroutine['asyncio.Future[Any]', None, _RetType] + ) -> None: + self._coro = coro + + def send(self, arg: None) -> 'asyncio.Future[Any]': + return self._coro.send(arg) + + def throw(self, arg: BaseException) -> None: # type: ignore + self._coro.throw(arg) # type: ignore + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp = await self._coro + return self._resp + + +class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType]) -> None: + # We're basing behavior on the exception as it can be caused by + # user code unrelated to the status of the connection. If you + # would like to close a connection you must do that + # explicitly. Otherwise connection error handling should kick in + # and close/recycle the connection as required. 
+ self._resp.release() + + +class _WSRequestContextManager(_BaseRequestContextManager[ + ClientWebSocketResponse]): + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType]) -> None: + await self._resp.close() + + +class _SessionRequestContextManager: + + __slots__ = ('_coro', '_resp', '_session') + + def __init__(self, + coro: Coroutine['asyncio.Future[Any]', None, ClientResponse], + session: ClientSession) -> None: + self._coro = coro + self._resp = None # type: Optional[ClientResponse] + self._session = session + + async def __aenter__(self) -> ClientResponse: + self._resp = await self._coro + return self._resp + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType]) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, *, + params: Optional[Mapping[str, str]]=None, + data: Any=None, + json: Any=None, + headers: LooseHeaders=None, + skip_auto_headers: Optional[Iterable[str]]=None, + auth: Optional[BasicAuth]=None, + allow_redirects: bool=True, + max_redirects: int=10, + compress: Optional[str]=None, + chunked: Optional[bool]=None, + expect100: bool=False, + raise_for_status: Optional[bool]=None, + read_until_eof: bool=True, + proxy: Optional[StrOrURL]=None, + proxy_auth: Optional[BasicAuth]=None, + timeout: Union[ClientTimeout, object]=sentinel, + cookies: Optional[LooseCookies]=None, + version: HttpVersion=http.HttpVersion11, + connector: Optional[BaseConnector]=None, + loop: Optional[asyncio.AbstractEventLoop]=None +) -> _SessionRequestContextManager: + """Constructs and sends a request. Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. 
+ Usage:: + >>> import aiohttp + >>> resp = await aiohttp.request('GET', 'http://python.org/') + >>> resp + + >>> data = await resp.read() + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, cookies=cookies, version=version, timeout=timeout, + connector=connector, connector_owner=connector_owner) + + return _SessionRequestContextManager( + session._request(method, url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth,), + session) diff --git a/venv/lib/python3.7/site-packages/aiohttp/client_exceptions.py b/venv/lib/python3.7/site-packages/aiohttp/client_exceptions.py new file mode 100644 index 0000000..e06077a --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/client_exceptions.py @@ -0,0 +1,268 @@ +"""HTTP related errors.""" + +import asyncio +import warnings +from typing import TYPE_CHECKING, Any, Optional, Tuple, Union + +from .typedefs import _CIMultiDict + +try: + import ssl + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa + Fingerprint) +else: + RequestInfo = ClientResponse = ConnectionKey = None + + +__all__ = ( + 'ClientError', + + 'ClientConnectionError', + 'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError', + + 'ClientSSLError', + 'ClientConnectorSSLError', 'ClientConnectorCertificateError', + + 'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError', + 'ServerFingerprintMismatch', + + 'ClientResponseError', 'ClientHttpProxyError', + 'WSServerHandshakeError', 'ContentTypeError', + + 'ClientPayloadError', 'InvalidURL') + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientResponseError(ClientError): + """Connection error during reading response. 
+ + request_info: instance of RequestInfo + """ + + def __init__(self, request_info: RequestInfo, + history: Tuple[ClientResponse, ...], *, + code: Optional[int]=None, + status: Optional[int]=None, + message: str='', + headers: Optional[_CIMultiDict]=None) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead") + warnings.warn("code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + + super().__init__("%s, message='%s'" % (self.status, message)) + + @property + def code(self) -> int: + warnings.warn("code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn("code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. + """ + def __init__(self, connection_key: ConnectionKey, + os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']: + return self._conn_key.ssl + + def __str__(self) -> str: + return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]' + .format(self, self.strerror)) + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. 
+ """ + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Optional[str]=None) -> None: + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, + host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + + def __repr__(self) -> str: + return '<{} expected={} got={} host={} port={}>'.format( + self.__class__.__name__, self.expected, self.got, + self.host, self.port) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part.""" + + # Derive from ValueError for backward compatibility + + def __init__(self, url: Any) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + super().__init__(url) + + @property + def url(self) -> Any: + return self.args[0] + + def __repr__(self) -> str: + return '<{} {}>'.format(self.__class__.__name__, self.url) + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = (ClientSSLError, ssl.CertificateError,) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = (ClientSSLError, ValueError,) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore + """Response certificate error.""" + + def __init__(self, connection_key: + ConnectionKey, certificate_error: Exception) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} ' + '[{0.certificate_error.__class__.__name__}: ' + '{0.certificate_error.args}]'.format(self)) diff --git a/venv/lib/python3.7/site-packages/aiohttp/client_proto.py b/venv/lib/python3.7/site-packages/aiohttp/client_proto.py new file mode 100644 index 0000000..87db71a --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/client_proto.py @@ -0,0 +1,237 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + ServerTimeoutError, +) +from .helpers import BaseTimerContext +from .http import HttpResponseParser, RawResponseMessage +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class ResponseHandler(BaseProtocol, + DataQueue[Tuple[RawResponseMessage, StreamReader]]): + 
"""Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, + loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b'' + self._upgraded = False + self._parser = None # type: Optional[HttpResponseParser] + + self._read_timeout = None # type: Optional[float] + self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle] + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + if (self._payload is not None and + not self._payload.is_eof() or self._upgraded): + return True + + return (self._should_close or self._upgraded or + self.exception() is not None or + self._payload_parser is not None or + len(self) > 0 or bool(self._tail)) + + def force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + if self._payload_parser is not None: + with suppress(Exception): + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception: + if self._payload is not None: + self._payload.set_exception( + ClientPayloadError( + 'Response payload is not completed')) + + if not self.is_eof(): + if isinstance(exc, OSError): + exc = ClientOSError(*exc.args) + if exc is None: + exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + self.set_exception(exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception(self, exc: BaseException) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: FlowControlDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b'' + self.data_received(data) + + def set_response_params(self, *, timer: BaseTimerContext=None, + skip_payload: bool=False, + read_until_eof: bool=False, + auto_decompress: bool=True, + read_timeout: Optional[float]=None) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + self._reschedule_timeout() + + self._parser = HttpResponseParser( + self, self._loop, timer=timer, + payload_exception=ClientPayloadError, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress) + + if self._tail: + data, self._tail = self._tail, b'' + self.data_received(data) + + def _drop_timeout(self) -> None: + if 
self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout) + else: + self._read_timeout_handle = None + + def _on_read_timeout(self) -> None: + exc = ServerTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + self._payload.set_exception(exc) + + def data_received(self, data: bytes) -> None: + if not data: + return + + # custom payload parser + if self._payload_parser is not None: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + else: + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + else: + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + self.set_exception(exc) + return + + self._upgraded = upgraded + + payload = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in (204, 304): + self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore # noqa + else: + self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if tail: + if upgraded: + self.data_received(tail) + else: + self._tail = tail diff --git a/venv/lib/python3.7/site-packages/aiohttp/client_reqrep.py b/venv/lib/python3.7/site-packages/aiohttp/client_reqrep.py new file mode 100644 index 0000000..27b7256 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/client_reqrep.py @@ -0,0 +1,1048 @@ +import asyncio +import codecs +import io +import re +import sys +import traceback +import warnings +from hashlib import md5, sha1, sha256 +from http.cookies import CookieError, Morsel, SimpleCookie +from types import MappingProxyType, TracebackType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Mapping, + Optional, + Tuple, + Type, + Union, + cast, +) + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . 
import hdrs, helpers, http, multipart, payload +from .abc import AbstractStreamWriter +from .client_exceptions import ( + ClientConnectionError, + ClientOSError, + ClientResponseError, + ContentTypeError, + InvalidURL, + ServerFingerprintMismatch, +) +from .formdata import FormData +from .helpers import ( # noqa + PY_36, + BaseTimerContext, + BasicAuth, + HeadersMixin, + TimerNoop, + noop, + reify, + set_result, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter +from .log import client_logger +from .streams import StreamReader # noqa +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseCookies, + LooseHeaders, + RawHeaders, +) + +try: + import ssl + from ssl import SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore + SSLContext = object # type: ignore + +try: + import cchardet as chardet +except ImportError: # pragma: no cover + import chardet + + +__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint') + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession # noqa + from .connector import Connection # noqa + from .tracing import Trace # noqa + + +json_re = re.compile(r'^application/(?:[\w.+-]+?\+)?json') + + +@attr.s(frozen=True, slots=True) +class ContentDisposition: + type = attr.ib(type=str) # type: Optional[str] + parameters = attr.ib(type=MappingProxyType) # type: MappingProxyType[str, str] # noqa + filename = attr.ib(type=str) # type: Optional[str] + + +@attr.s(frozen=True, slots=True) +class RequestInfo: + url = attr.ib(type=URL) + method = attr.ib(type=str) + headers = attr.ib(type=CIMultiDictProxy) # type: CIMultiDictProxy[str] + real_url = attr.ib(type=URL) + + @real_url.default + def real_url_default(self) -> URL: + return self.url + + +class Fingerprint: + HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, + } + + def __init__(self, fingerprint: bytes) -> None: + digestlen = len(fingerprint) + hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError('fingerprint has invalid length') + elif hashfunc is md5 or hashfunc is sha1: + raise ValueError('md5 and sha1 are insecure and ' + 'not supported. 
Use sha256.') + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + @property + def fingerprint(self) -> bytes: + return self._fingerprint + + def check(self, transport: asyncio.Transport) -> None: + if not transport.get_extra_info('sslcontext'): + return + sslobj = transport.get_extra_info('ssl_object') + cert = sslobj.getpeercert(binary_form=True) + got = self._hashfunc(cert).digest() + if got != self._fingerprint: + host, port, *_ = transport.get_extra_info('peername') + raise ServerFingerprintMismatch(self._fingerprint, + got, host, port) + + +if ssl is not None: + SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) +else: # pragma: no cover + SSL_ALLOWED_TYPES = type(None) + + +def _merge_ssl_params( + ssl: Union['SSLContext', bool, Fingerprint, None], + verify_ssl: Optional[bool], + ssl_context: Optional['SSLContext'], + fingerprint: Optional[bytes] +) -> Union['SSLContext', bool, Fingerprint, None]: + if verify_ssl is not None and not verify_ssl: + warnings.warn("verify_ssl is deprecated, use ssl=False instead", + DeprecationWarning, + stacklevel=3) + if ssl is not None: + raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive") + else: + ssl = False + if ssl_context is not None: + warnings.warn("ssl_context is deprecated, use ssl=context instead", + DeprecationWarning, + stacklevel=3) + if ssl is not None: + raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive") + else: + ssl = ssl_context + if fingerprint is not None: + warnings.warn("fingerprint is deprecated, " + "use ssl=Fingerprint(fingerprint) instead", + DeprecationWarning, + stacklevel=3) + if ssl is not None: + raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive") + else: + ssl = Fingerprint(fingerprint) + if not isinstance(ssl, SSL_ALLOWED_TYPES): + raise TypeError("ssl should be SSLContext, bool, Fingerprint or None, " + "got {!r} instead.".format(ssl)) + return ssl + + +@attr.s(slots=True, frozen=True) +class ConnectionKey: + # the key should contain an information about used proxy / TLS + # to prevent reusing wrong connections from a pool + host = attr.ib(type=str) + port = attr.ib(type=int) # type: Optional[int] + is_ssl = attr.ib(type=bool) + ssl = attr.ib() # type: Union[SSLContext, None, bool, Fingerprint] + proxy = attr.ib() # type: Optional[URL] + proxy_auth = attr.ib() # type: Optional[BasicAuth] + proxy_headers_hash = attr.ib(type=int) # type: Optional[int] # noqa # hash(CIMultiDict) + + +def _is_expected_content_type(response_content_type: str, + expected_content_type: str) -> bool: + if expected_content_type == 'application/json': + return json_re.match(response_content_type) is not None + return expected_content_type in response_content_type + + +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: '*/*', + hdrs.ACCEPT_ENCODING: 'gzip, deflate', + } + + body = b'' + auth = None + response = None + response_class = None + + _writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. 
+ # Until writer has finished finalizer will not be called. + + def __init__(self, method: str, url: URL, *, + params: Optional[Mapping[str, str]]=None, + headers: Optional[LooseHeaders]=None, + skip_auto_headers: Iterable[str]=frozenset(), + data: Any=None, + cookies: Optional[LooseCookies]=None, + auth: Optional[BasicAuth]=None, + version: http.HttpVersion=http.HttpVersion11, + compress: Optional[str]=None, + chunked: Optional[bool]=None, + expect100: bool=False, + loop: Optional[asyncio.AbstractEventLoop]=None, + response_class: Optional[Type['ClientResponse']]=None, + proxy: Optional[URL]=None, + proxy_auth: Optional[BasicAuth]=None, + timer: Optional[BaseTimerContext]=None, + session: Optional['ClientSession']=None, + ssl: Union[SSLContext, bool, Fingerprint, None]=None, + proxy_headers: Optional[LooseHeaders]=None, + traces: Optional[List['Trace']]=None): + + if loop is None: + loop = asyncio.get_event_loop() + + assert isinstance(url, URL), url + assert isinstance(proxy, (URL, type(None))), proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + self._session = cast('ClientSession', session) + if params: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) + self.original_url = url + self.url = url.with_fragment(None) + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse + else: + real_response_class = response_class + self.response_class = real_response_class # type: Type[ClientResponse] + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + if traces is None: + traces = [] + self._traces = traces + + def is_ssl(self) -> bool: + return self.url.scheme in ('https', 'wss') + + @property + def ssl(self) -> Union['SSLContext', None, bool, Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: + proxy_headers = self.proxy_headers + if proxy_headers: + h = hash(tuple((k, v) for k, v in proxy_headers.items())) # type: Optional[int] # noqa + else: + h = None + return ConnectionKey(self.host, self.port, self.is_ssl(), + self.ssl, + self.proxy, self.proxy_auth, h) + + @property + def host(self) -> str: + ret = self.url.host + assert ret is not None + return ret + + @property + def port(self) -> Optional[int]: + return self.url.port + + @property + def request_info(self) -> RequestInfo: + headers = CIMultiDictProxy(self.headers) # type: CIMultiDictProxy[str] + return RequestInfo(self.url, self.method, + headers, self.original_url) + + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.host: + raise InvalidURL(url) + + # basic auth info + username, password = url.user, url.password + if username: + self.auth = helpers.BasicAuth(username, password or '') + + def update_version(self, version: 
Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. + + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [l.strip() for l in version.split('.', 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + 'Can not parse http version number: {}' + .format(version)) from None + self.version = version + + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers = CIMultiDict() # type: CIMultiDict[str] + + # add host + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = '[{}]'.format(netloc) + if not self.url.is_default_port(): + netloc += ':' + str(self.url.port) + self.headers[hdrs.HOST] = netloc + + if headers: + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() # type: ignore + + for key, value in headers: + # A special case for Host header + if key.lower() == 'host': + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers)) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return + + c = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, '')) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa + c[name] = mrsl_val + else: + c[name] = value # type: ignore + + self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip() + + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if not data: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() + if enc: + if self.compress: + raise ValueError( + 'compress can not be set ' + 'if Content-Encoding header is set') + elif self.compress: + if not isinstance(self.compress, str): + self.compress = 'deflate' + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() + + if 'chunked' in te: + if self.chunked: + raise ValueError( + 'chunked can not be set ' + 'if "Transfer-Encoding: chunked" header is set') + + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + 'chunked can not be set ' + 'if Content-Length header is set') + + self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_auth(self, auth: Optional[BasicAuth]) -> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is 
None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError('BasicAuth() tuple is required instead') + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, body: Any) -> None: + if not body: + return + + # FormData + if isinstance(body, FormData): + body = body() + + try: + body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + except payload.LookupError: + body = FormData(body)() + + self.body = body + + # enable chunked encoding if needed + if not self.chunked: + if hdrs.CONTENT_LENGTH not in self.headers: + size = body.size + if size is None: + self.chunked = True + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + # copy payload headers + assert body.headers + for (key, value) in body.headers.items(): + if key in self.headers: + continue + if key in self.skip_auto_headers: + continue + self.headers[key] = value + + def update_expect_continue(self, expect: bool=False) -> None: + if expect: + self.headers[hdrs.EXPECT] = '100-continue' + elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue': + expect = True + + if expect: + self._continue = self.loop.create_future() + + def update_proxy(self, proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders]) -> None: + if proxy and not proxy.scheme == 'http': + raise ValueError("Only http proxies are supported") + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + self.proxy_headers = proxy_headers + + def keep_alive(self) -> bool: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == 'keep-alive': + return True + else: # no headers means we close for Http 1.0 + return False + elif self.headers.get(hdrs.CONNECTION) == 'close': + return False + + return True + + async def write_bytes(self, writer: AbstractStreamWriter, + conn: 'Connection') -> None: + """Support coroutines that yields bytes objects.""" + # 100 response + if self._continue is not None: + await writer.drain() + await self._continue + + protocol = conn.protocol + assert protocol is not None + try: + if isinstance(self.body, payload.Payload): + await self.body.write(writer) + else: + if isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) # type: ignore + + for chunk in self.body: + await writer.write(chunk) # type: ignore + + await writer.write_eof() + except OSError as exc: + new_exc = ClientOSError( + exc.errno, + 'Can not write request body for %s' % self.url) + new_exc.__context__ = exc + new_exc.__cause__ = exc + protocol.set_exception(new_exc) + except asyncio.CancelledError as exc: + if not conn.closed: + protocol.set_exception(exc) + except Exception as exc: + protocol.set_exception(exc) + finally: + self._writer = None + + async def send(self, conn: 'Connection') -> 'ClientResponse': + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + path = '{}:{}'.format(self.url.raw_host, self.url.port) + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path + if self.url.raw_query_string: + path += '?' 
+ self.url.raw_query_string + + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, self.loop, + on_chunk_sent=self._on_chunk_request_sent + ) + + if self.compress: + writer.enable_compression(self.compress) + + if self.chunked is not None: + writer.enable_chunking() + + # set default content-type + if (self.method in self.POST_METHODS and + hdrs.CONTENT_TYPE not in self.skip_auto_headers and + hdrs.CONTENT_TYPE not in self.headers): + self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream' + + # set the connection header + connection = self.headers.get(hdrs.CONNECTION) + if not connection: + if self.keep_alive(): + if self.version == HttpVersion10: + connection = 'keep-alive' + else: + if self.version == HttpVersion11: + connection = 'close' + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + # status + headers + status_line = '{0} {1} HTTP/{2[0]}.{2[1]}'.format( + self.method, path, self.version) + await writer.write_headers(status_line, self.headers) + + self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, self.original_url, + writer=self._writer, continue100=self._continue, timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session + ) + return self.response + + async def close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + + def terminate(self) -> None: + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer = None + + async def _on_chunk_request_sent(self, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(chunk) + + +class ClientResponse(HeadersMixin): + + # from the Status-Line of the response + version = None # HTTP-Version + status = None # type: int # Status-Code + reason = None # Reason-Phrase + + content = None # type: StreamReader # Payload stream + _headers = None # type: CIMultiDictProxy[str] # Response headers + _raw_headers = None # type: RawHeaders # Response raw headers + + _connection = None # current connection + _source_traceback = None + # setted up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + + def __init__(self, method: str, url: URL, *, + writer: 'asyncio.Task[None]', + continue100: Optional['asyncio.Future[bool]'], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List['Trace'], + loop: asyncio.AbstractEventLoop, + session: 'ClientSession') -> None: + assert isinstance(url, URL) + + self.method = method + self.cookies = SimpleCookie() + + self._real_url = url + self._url = url.with_fragment(None) + self._body = None # type: Any + self._writer = writer # type: Optional[asyncio.Task[None]] + self._continue = continue100 # None by default + self._closed = True + self._history = () # type: Tuple[ClientResponse, ...] 
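+ # request metadata kept for tracing and the reified properties below; the timer falls back to a no-op context when none is supplied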
+ self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache = {} # type: Dict[str, Any] + self._traces = traces + self._loop = loop + # store a reference to session #1985 + self._session = session # type: Optional[ClientSession] + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + @reify + def url(self) -> URL: + return self._url + + @reify + def url_obj(self) -> URL: + warnings.warn( + "Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) + return self._url + + @reify + def real_url(self) -> URL: + return self._real_url + + @reify + def host(self) -> str: + assert self._url.host is not None + return self._url.host + + @reify + def headers(self) -> 'CIMultiDictProxy[str]': + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + return self._raw_headers + + @reify + def request_info(self) -> RequestInfo: + return self._request_info + + @reify + def content_disposition(self) -> Optional[ContentDisposition]: + raw = self._headers.get(hdrs.CONTENT_DISPOSITION) + if raw is None: + return None + disposition_type, params_dct = multipart.parse_content_disposition(raw) + params = MappingProxyType(params_dct) + filename = multipart.content_disposition_filename(params) + return ContentDisposition(disposition_type, params, filename) + + def __del__(self, _warnings: Any=warnings) -> None: + if self._closed: + return + + if self._connection is not None: + self._connection.release() + self._cleanup_writer() + + if self._loop.get_debug(): + if PY_36: + kwargs = {'source': self} + else: + kwargs = {} + _warnings.warn("Unclosed response {!r}".format(self), + ResourceWarning, + **kwargs) + context = {'client_response': self, + 'message': 'Unclosed response'} + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def __repr__(self) -> str: + out = io.StringIO() + ascii_encodable_url = str(self.url) + if self.reason: + ascii_encodable_reason = self.reason.encode('ascii', + 'backslashreplace') \ + .decode('ascii') + else: + ascii_encodable_reason = self.reason + print(''.format( + ascii_encodable_url, self.status, ascii_encodable_reason), + file=out) + print(self.headers, file=out) + return out.getvalue() + + @property + def connection(self) -> Optional['Connection']: + return self._connection + + @reify + def history(self) -> Tuple['ClientResponse', ...]: + """A sequence of of responses, if redirects occurred.""" + return self._history + + @reify + def links(self) -> 'MultiDictProxy[MultiDictProxy[Union[str, URL]]]': + links_str = ", ".join(self.headers.getall("link", [])) + + if not links_str: + return MultiDictProxy(MultiDict()) + + links = MultiDict() # type: MultiDict[MultiDictProxy[Union[str, URL]]] + + for val in re.split(r",(?=\s*<)", links_str): + match = re.match(r"\s*<(.*)>(.*)", val) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + url, params_str = match.groups() + params = params_str.split(";")[1:] + + link = MultiDict() # type: MultiDict[Union[str, URL]] + + for param in params: + match = re.match( + r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", + param, re.M + ) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + key, _, value, _ = match.groups() + + link.add(key, value) + + key = link.get("rel", url) # type: ignore + + link.add("url", self.url.join(URL(url))) + + links.add(key, MultiDictProxy(link)) + + return 
MultiDictProxy(links) + + async def start(self, connection: 'Connection') -> 'ClientResponse': + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection + + with self._timer: + while True: + # read response + try: + message, payload = await self._protocol.read() # type: ignore # noqa + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, self.history, + status=exc.code, + message=exc.message, headers=exc.headers) from exc + + if (message.code < 100 or + message.code > 199 or message.code == 101): + break + + if self._continue is not None: + set_result(self._continue, True) + self._continue = None + + # payload eof handler + payload.on_eof(self._response_eof) + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + + # payload + self.content = payload + + # cookies + for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): + try: + self.cookies.load(hdr) + except CookieError as exc: + client_logger.warning( + 'Can not load response cookies: %s', exc) + return self + + def _response_eof(self) -> None: + if self._closed: + return + + if self._connection is not None: + # websocket, protocol could be None because + # connection could be detached + if (self._connection.protocol is not None and + self._connection.protocol.upgraded): + return + + self._connection.release() + self._connection = None + + self._closed = True + self._cleanup_writer() + + @property + def closed(self) -> bool: + return self._closed + + def close(self) -> None: + if not self._released: + self._notify_content() + if self._closed: + return + + self._closed = True + if self._loop is None or self._loop.is_closed(): + return + + if self._connection is not None: + self._connection.close() + self._connection = None + self._cleanup_writer() + + def release(self) -> Any: + if not self._released: + self._notify_content() + if self._closed: + return noop() + + self._closed = True + if self._connection is not None: + self._connection.release() + self._connection = None + + self._cleanup_writer() + return noop() + + def raise_for_status(self) -> None: + if 400 <= self.status: + assert self.reason # always not None for started response + self.release() + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers) + + def _cleanup_writer(self) -> None: + if self._writer is not None: + self._writer.cancel() + self._writer = None + self._session = None + + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + content.set_exception( + ClientConnectionError('Connection closed')) + self._released = True + + async def wait_for_close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + self.release() + + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received(self._body) + except BaseException: + self.close() + raise + elif self._released: + raise ClientConnectionError('Connection closed') + + return self._body + + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() + 
mimetype = helpers.parse_mimetype(ctype) + + encoding = mimetype.parameters.get('charset') + if encoding: + try: + codecs.lookup(encoding) + except LookupError: + encoding = None + if not encoding: + if mimetype.type == 'application' and mimetype.subtype == 'json': + # RFC 7159 states that the default encoding is UTF-8. + encoding = 'utf-8' + else: + encoding = chardet.detect(self._body)['encoding'] + if not encoding: + encoding = 'utf-8' + + return encoding + + async def text(self, + encoding: Optional[str]=None, errors: str='strict') -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() + + if encoding is None: + encoding = self.get_encoding() + + return self._body.decode(encoding, errors=errors) # type: ignore + + async def json(self, *, encoding: str=None, + loads: JSONDecoder=DEFAULT_JSON_DECODER, + content_type: Optional[str]='application/json') -> Any: + """Read and decodes JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + message=('Attempt to decode JSON with ' + 'unexpected mimetype: %s' % ctype), + headers=self.headers) + + stripped = self._body.strip() # type: ignore + if not stripped: + return None + + if encoding is None: + encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> 'ClientResponse': + return self + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can closes connection + # is state is broken + self.release() diff --git a/venv/lib/python3.7/site-packages/aiohttp/client_ws.py b/venv/lib/python3.7/site-packages/aiohttp/client_ws.py new file mode 100644 index 0000000..e5fd126 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/client_ws.py @@ -0,0 +1,301 @@ +"""WebSocket client for asyncio.""" + +import asyncio +from typing import Any, Optional + +import async_timeout + +from .client_exceptions import ClientError +from .client_reqrep import ClientResponse +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSMessage, + WSMsgType, +) +from .http_websocket import WebSocketWriter # WSMessage +from .streams import EofStream, FlowControlDataQueue # noqa +from .typedefs import ( + DEFAULT_JSON_DECODER, + DEFAULT_JSON_ENCODER, + JSONDecoder, + JSONEncoder, +) + + +class ClientWebSocketResponse: + + def __init__(self, + reader: 'FlowControlDataQueue[WSMessage]', + writer: WebSocketWriter, + protocol: Optional[str], + response: ClientResponse, + timeout: float, + autoclose: bool, + autoping: bool, + loop: asyncio.AbstractEventLoop, + *, + receive_timeout: Optional[float]=None, + heartbeat: Optional[float]=None, + compress: int=0, + client_notakeover: bool=False) -> None: + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code = None # type: Optional[int] + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb = None + if 
heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb = None + self._loop = loop + self._waiting = None # type: Optional[asyncio.Future[bool]] + self._exception = None # type: Optional[BaseException] + self._compress = compress + self._client_notakeover = client_notakeover + + self._reset_heartbeat() + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. + self._loop.create_task(self._writer.ping()) + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop) + + def _pong_not_received(self) -> None: + if not self._closed: + self._closed = True + self._close_code = 1006 + self._exception = asyncio.TimeoutError() + self._response.close() + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def protocol(self) -> Optional[str]: + return self._protocol + + @property + def compress(self) -> int: + return self._compress + + @property + def client_notakeover(self) -> bool: + return self._client_notakeover + + def get_extra_info(self, name: str, default: Any=None) -> Any: + """extra info from connection transport""" + conn = self._response.connection + if conn is None: + return default + transport = conn.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes=b'') -> None: + await self._writer.ping(message) + + async def pong(self, message: bytes=b'') -> None: + await self._writer.pong(message) + + async def send_str(self, data: str, + compress: Optional[int]=None) -> None: + if not isinstance(data, str): + raise TypeError('data argument must be str (%r)' % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, + compress: Optional[int]=None) -> None: + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError('data argument must be byte-ish (%r)' % + type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json(self, data: Any, + compress: Optional[int]=None, + *, dumps: JSONEncoder=DEFAULT_JSON_ENCODER) -> None: + await self.send_str(dumps(data), compress=compress) + + async def close(self, *, code: int=1000, message: bytes=b'') -> bool: + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + self._reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._cancel_heartbeat() + self._closed = True + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = 1006 + self._response.close() + 
raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + self._response.close() + return True + + if self._closing: + self._response.close() + return True + + while True: + try: + with async_timeout.timeout(self._timeout, loop=self._loop): + msg = await self._reader.read() + except asyncio.CancelledError: + self._close_code = 1006 + self._response.close() + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + self._response.close() + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + async def receive(self, timeout: Optional[float]=None) -> WSMessage: + while True: + if self._waiting is not None: + raise RuntimeError( + 'Concurrent call to receive() is not allowed') + + if self._closed: + return WS_CLOSED_MESSAGE + elif self._closing: + await self.close() + return WS_CLOSED_MESSAGE + + try: + self._waiting = self._loop.create_future() + try: + with async_timeout.timeout( + timeout or self._receive_timeout, + loop=self._loop): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + self._waiting = None + set_result(waiter, True) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = 1006 + raise + except EofStream: + self._close_code = 1000 + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except ClientError: + self._closed = True + self._close_code = 1006 + return WS_CLOSED_MESSAGE + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = 1006 + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float]=None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not str".format(msg.type, + msg.data)) + return msg.data + + async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError( + "Received message {}:{!r} is not bytes".format(msg.type, + msg.data)) + return msg.data + + async def receive_json(self, + *, loads: JSONDecoder=DEFAULT_JSON_DECODER, + timeout: Optional[float]=None) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + def __aiter__(self) -> 'ClientWebSocketResponse': + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, + WSMsgType.CLOSING, + WSMsgType.CLOSED): + raise StopAsyncIteration # NOQA + return msg diff --git a/venv/lib/python3.7/site-packages/aiohttp/connector.py b/venv/lib/python3.7/site-packages/aiohttp/connector.py new file mode 100644 index 0000000..dc33879 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/connector.py @@ -0,0 +1,1128 @@ +import asyncio +import functools +import random +import sys +import traceback +import 
warnings +from collections import defaultdict, deque +from contextlib import suppress +from http.cookies import SimpleCookie +from itertools import cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Type, + Union, + cast, +) + +import attr + +from . import hdrs, helpers +from .abc import AbstractResolver +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ( + PY_36, + CeilTimeout, + get_running_loop, + is_ip_address, + noop2, + sentinel, +) +from .http import RESPONSES +from .locks import EventResultOrError +from .resolver import DefaultResolver + +try: + import ssl + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore + SSLContext = object # type: ignore + + +__all__ = ('BaseConnector', 'TCPConnector', 'UnixConnector') + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientTimeout # noqa + from .client_reqrep import ConnectionKey # noqa + from .tracing import Trace # noqa + + +class _DeprecationWaiter: + __slots__ = ('_awaitable', '_awaited') + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn("Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning) + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__(self, connector: 'BaseConnector', + key: 'ConnectionKey', + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop) -> None: + self._key = key + self._connector = connector + self._loop = loop + self._protocol = protocol # type: Optional[ResponseHandler] + self._callbacks = [] # type: List[Callable[[], None]] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) -> str: + return 'Connection<{}>'.format(self._key) + + def __del__(self, _warnings: Any=warnings) -> None: + if self._protocol is not None: + if PY_36: + kwargs = {'source': self} + else: + kwargs = {} + _warnings.warn('Unclosed connection {!r}'.format(self), + ResourceWarning, + **kwargs) + if self._loop.is_closed(): + return + + self._connector._release( + self._key, self._protocol, should_close=True) + + context = {'client_connection': self, + 'message': 'Unclosed connection'} + if self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn("connector.loop property is deprecated", + DeprecationWarning, + stacklevel=2) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback 
is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, should_close=True) + self._protocol = None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, + should_close=self._protocol.should_close) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """ placeholder for BaseConnector.connect function """ + + def close(self) -> None: + pass + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + def __init__(self, *, + keepalive_timeout: Union[object, None, float]=sentinel, + force_close: bool=False, + limit: int=100, limit_per_host: int=0, + enable_cleanup_closed: bool=False, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + + if force_close: + if keepalive_timeout is not None and \ + keepalive_timeout is not sentinel: + raise ValueError('keepalive_timeout cannot ' + 'be set if force_close is True') + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = get_running_loop(loop) + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns = {} # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] # noqa + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired = set() # type: Set[ResponseHandler] + self._acquired_per_host = defaultdict(set) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]] # noqa + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + self._waiters = defaultdict(deque) # type: ignore + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + self.cookies = SimpleCookie() + + # start keep-alive connection cleanup task + self._cleanup_handle = None + + # start cleanup closed transports task + self._cleanup_closed_handle = None + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]] # noqa + self._cleanup_closed() + + def __del__(self, _warnings: Any=warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + if PY_36: + kwargs = {'source': self} + else: + kwargs = {} + _warnings.warn("Unclosed connector {!r}".format(self), + ResourceWarning, + **kwargs) + context = {'connector': self, + 'connections': conns, + 'message': 'Unclosed connector'} + if 
self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self) -> 'BaseConnector': + warnings.warn('"witn Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning) + return self + + def __exit__(self, *exc: Any) -> None: + self.close() + + async def __aenter__(self) -> 'BaseConnector': + return self + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]]=None, + exc_value: Optional[BaseException]=None, + exc_traceback: Optional[TracebackType]=None + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number for simultaneous connections. + + If limit is 0 the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit_per_host for simultaneous connections + to the same endpoint. + + Endpoints are the same if they are have equal + (host, port, is_ssl) triple. + + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + + now = self._loop.time() + timeout = self._keepalive_timeout + + if self._conns: + connections = {} + deadline = now - timeout + for key, conns in self._conns.items(): + alive = [] + for proto, use_time in conns: + if proto.is_connected(): + if use_time - deadline < 0: + transport = proto.transport + proto.close() + if (key.is_ssl and + not self._cleanup_closed_disabled): + self._cleanup_closed_transports.append( + transport) + else: + alive.append((proto, use_time)) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, '_cleanup', timeout, self._loop) + + def _drop_acquired_per_host(self, key: 'ConnectionKey', + val: ResponseHandler) -> None: + acquired_per_host = self._acquired_per_host + if key not in acquired_per_host: + return + conns = acquired_per_host[key] + conns.remove(val) + if not conns: + del self._acquired_per_host[key] + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + Some broken ssl servers may leave socket open without proper close. 
+ """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, '_cleanup_closed', + self._cleanup_closed_period, self._loop) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop2()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. + """ + return self._closed + + def _available_connections(self, key: 'ConnectionKey') -> int: + """ + Return number of available connections taking into account + the limit, limit_per_host and the connection key. + + If it returns less than 1 means that there is no connections + availables. + """ + + if self._limit: + # total calc available connections + available = self._limit - len(self._acquired) + + # check limit per host + if (self._limit_per_host and available > 0 and + key in self._acquired_per_host): + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + + elif self._limit_per_host and key in self._acquired_per_host: + # check limit per host + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + else: + available = 1 + + return available + + async def connect(self, req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout') -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + available = self._available_connections(key) + + # Wait if there are no available connections. + if available <= 0: + fut = self._loop.create_future() + + # This connection will now count towards the limit. + waiters = self._waiters[key] + waiters.append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + try: + waiters.remove(fut) + except ValueError: # fut may no longer be in list + pass + + raise e + finally: + if not waiters: + try: + del self._waiters[key] + except KeyError: + # the key was evicted before. 
+ pass + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + + proto = self._get(key) + if proto is None: + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_start() + + try: + proto = await self._create_connection(req, traces, timeout) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + except BaseException: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + self._release_waiter() + raise + else: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_end() + else: + if traces: + for trace in traces: + await trace.send_connection_reuseconn() + + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + return Connection(self, key, proto, self._loop) + + def _get(self, key: 'ConnectionKey') -> Optional[ResponseHandler]: + try: + conns = self._conns[key] + except KeyError: + return None + + t1 = self._loop.time() + while conns: + proto, t0 = conns.pop() + if proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport = proto.transport + proto.close() + # only for SSL transports + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters till found one that is not finsihed and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Having the dict keys ordered this avoids to iterate + # at the same order at each call. + queues = list(self._waiters.keys()) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter = waiters.popleft() + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: 'ConnectionKey', + proto: ResponseHandler) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + try: + self._acquired.remove(proto) + self._drop_acquired_per_host(key, proto) + except KeyError: # pragma: no cover + # this may be result of undetermenistic order of objects + # finalization due garbage collection. 
+ pass + else: + self._release_waiter() + + def _release(self, key: 'ConnectionKey', protocol: ResponseHandler, + *, should_close: bool=False) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close: + should_close = True + + if should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, '_cleanup', self._keepalive_timeout, self._loop) + + async def _create_connection(self, req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout') -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + + def __init__(self, ttl: Optional[float]=None) -> None: + self._addrs_rr = {} # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] # noqa + self._timestamps = {} # type: Dict[Tuple[str, int], float] + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds having cached a DNS entry, None forever. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. 
+ """ + + def __init__(self, *, verify_ssl: bool=True, + fingerprint: Optional[bytes]=None, + use_dns_cache: bool=True, ttl_dns_cache: int=10, + family: int=0, + ssl_context: Optional[SSLContext]=None, + ssl: Union[None, bool, Fingerprint, SSLContext]=None, + local_addr: Optional[str]=None, + resolver: Optional[AbstractResolver]=None, + keepalive_timeout: Union[None, float, object]=sentinel, + force_close: bool=False, + limit: int=100, limit_per_host: int=0, + enable_cleanup_closed: bool=False, + loop: Optional[asyncio.AbstractEventLoop]=None): + super().__init__(keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, + fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_events = {} # type: Dict[Tuple[str, int], EventResultOrError] # noqa + self._family = family + self._local_addr = local_addr + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for ev in self._throttle_dns_events.values(): + ev.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache(self, + host: Optional[str]=None, + port: Optional[int]=None) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port " + "or none of them are allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host(self, + host: str, port: int, + traces: Optional[List['Trace']]=None + ) -> List[Dict[str, Any]]: + if is_ip_address(host): + return [{'hostname': host, 'host': host, 'port': port, + 'family': self._family, 'proto': 0, 'flags': 0}] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = (await self._resolver.resolve( + host, port, family=self._family)) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + + if (key in self._cached_hosts) and \ + (not self._cached_hosts.expired(key)): + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + + return self._cached_hosts.next_addrs(key) + + if key in self._throttle_dns_events: + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + await self._throttle_dns_events[key].wait() + else: + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + self._throttle_dns_events[key] = \ + EventResultOrError(self._loop) + try: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + addrs = await \ + self._resolver.resolve(host, port, family=self._family) + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + self._cached_hosts.add(key, addrs) + self._throttle_dns_events[key].set() + except BaseException as e: + # any DNS exception, independently of the implementation + # is set for the waiters to raise the same exception. 
+ self._throttle_dns_events[key].set(exc=e) + raise + finally: + self._throttle_dns_events.pop(key) + + return self._cached_hosts.next_addrs(key) + + async def _create_connection(self, req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout') -> ResponseHandler: + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. + """ + if req.proxy: + _, proto = await self._create_proxy_connection( + req, traces, timeout) + else: + _, proto = await self._create_direct_connection( + req, traces, timeout) + + return proto + + @staticmethod + @functools.lru_cache(None) + def _make_ssl_context(verified: bool) -> SSLContext: + if verified: + return ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + def _get_ssl_context(self, req: 'ClientRequest') -> Optional[SSLContext]: + """Logic to get the correct SSL context + + 0. if req.ssl is false, return None + + 1. if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if req.is_ssl(): + if ssl is None: # pragma: no cover + raise RuntimeError('SSL is not supported.') + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + return self._make_ssl_context(True) + else: + return None + + def _get_fingerprint(self, + req: 'ClientRequest') -> Optional['Fingerprint']: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, *args: Any, + req: 'ClientRequest', + timeout: 'ClientTimeout', + client_error: Type[Exception]=ClientConnectorError, + **kwargs: Any) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + with CeilTimeout(timeout.sock_connect): + return cast( + Tuple[asyncio.Transport, ResponseHandler], + await self._loop.create_connection(*args, **kwargs)) + except cert_errors as exc: + raise ClientConnectorCertificateError( + req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + raise client_error(req.connection_key, exc) from exc + + async def _create_direct_connection( + self, + req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout', + *, + client_error: Type[Exception]=ClientConnectorError + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
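+ # hence the resolution below is wrapped in asyncio.shield() so that cancelling one request cannot abort the shared DNS lookup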
+ host = req.url.raw_host + assert host is not None + port = req.port + assert port is not None + hosts = await asyncio.shield(self._resolve_host( + host, + port, + traces=traces), loop=self._loop) + except OSError as exc: + # in case of proxy it is not ClientProxyConnectionError + # it is problem of resolving proxy ip itself + raise ClientConnectorError(req.connection_key, exc) from exc + + last_exc = None # type: Optional[Exception] + + for hinfo in hosts: + host = hinfo['host'] + port = hinfo['port'] + + try: + transp, proto = await self._wrap_create_connection( + self._factory, host, port, timeout=timeout, + ssl=sslcontext, family=hinfo['family'], + proto=hinfo['proto'], flags=hinfo['flags'], + server_hostname=hinfo['hostname'] if sslcontext else None, + local_addr=self._local_addr, + req=req, client_error=client_error) + except ClientConnectorError as exc: + last_exc = exc + continue + + if req.is_ssl() and fingerprint: + try: + fingerprint.check(transp) + except ServerFingerprintMismatch as exc: + transp.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transp) + last_exc = exc + continue + + return transp, proto + else: + assert last_exc is not None + raise last_exc + + async def _create_proxy_connection( + self, + req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout' + ) -> Tuple[asyncio.Transport, ResponseHandler]: + headers = {} # type: Dict[str, str] + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore + headers[hdrs.HOST] = req.headers[hdrs.HOST] + + url = req.proxy + assert url is not None + proxy_req = ClientRequest( + hdrs.METH_GET, url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl) + + # create connection to proxy server + transport, proto = await self._create_direct_connection( + proxy_req, [], timeout, client_error=ClientProxyConnectionError) + + # Many HTTP proxies has buggy keepalive support. Let's not + # reuse connection but close it after processing every + # response. 
+ proto.force_close() + + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.is_ssl(): + sslcontext = self._get_ssl_context(req) + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.url = req.url + key = attr.evolve(req.connection_key, + proxy=None, + proxy_auth=None, + proxy_headers_hash=None) + conn = Connection(self, key, proto, self._loop) + proxy_resp = await proxy_req.send(conn) + try: + protocol = conn._protocol + assert protocol is not None + protocol.set_response_params() + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + conn._transport = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = RESPONSES[resp.status][0] + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers) + rawsock = transport.get_extra_info('socket', default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance") + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + finally: + transport.close() + + transport, proto = await self._wrap_create_connection( + self._factory, timeout=timeout, + ssl=sslcontext, sock=rawsock, + server_hostname=req.host, + req=req) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. 
+ """ + + def __init__(self, path: str, force_close: bool=False, + keepalive_timeout: Union[object, float, None]=sentinel, + limit: int=100, limit_per_host: int=0, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + super().__init__(force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, limit_per_host=limit_per_host, loop=loop) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection(self, req: 'ClientRequest', + traces: List['Trace'], + timeout: 'ClientTimeout') -> ResponseHandler: + try: + with CeilTimeout(timeout.sock_connect): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path) + except OSError as exc: + raise ClientConnectorError(req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) diff --git a/venv/lib/python3.7/site-packages/aiohttp/cookiejar.py b/venv/lib/python3.7/site-packages/aiohttp/cookiejar.py new file mode 100644 index 0000000..a78e88d --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/cookiejar.py @@ -0,0 +1,357 @@ +import asyncio +import datetime +import os # noqa +import pathlib +import pickle +import re +from collections import defaultdict +from http.cookies import BaseCookie, Morsel, SimpleCookie # noqa +from math import ceil +from typing import ( # noqa + DefaultDict, + Dict, + Iterable, + Iterator, + Mapping, + Optional, + Set, + Tuple, + Union, + cast, +) + +from yarl import URL + +from .abc import AbstractCookieJar +from .helpers import is_ip_address +from .typedefs import LooseCookies, PathLike + +__all__ = ('CookieJar', 'DummyCookieJar') + + +CookieItem = Union[str, 'Morsel[str]'] + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)") + + DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") + + DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" + "(aug)|(sep)|(oct)|(nov)|(dec)", re.I) + + DATE_YEAR_RE = re.compile(r"(\d{2,4})") + + MAX_TIME = 2051215261.0 # so far in future (2035-01-01) + + def __init__(self, *, unsafe: bool=False, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + super().__init__(loop=loop) + self._cookies = defaultdict(SimpleCookie) #type: DefaultDict[str, SimpleCookie] # noqa + self._host_only_cookies = set() # type: Set[Tuple[str, str]] + self._unsafe = unsafe + self._next_expiration = ceil(self._loop.time()) + self._expirations = {} # type: Dict[Tuple[str, str], int] + + def save(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode='wb') as f: + pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) + + def load(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode='rb') as f: + self._cookies = pickle.load(f) + + def clear(self) -> None: + self._cookies.clear() + self._host_only_cookies.clear() + self._next_expiration = ceil(self._loop.time()) + self._expirations.clear() + + def __iter__(self) -> 'Iterator[Morsel[str]]': + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + return sum(1 for i in self) + + def _do_expiration(self) -> None: + now = self._loop.time() + if self._next_expiration > now: + return + if not self._expirations: + return + 
next_expiration = self.MAX_TIME + to_del = [] + cookies = self._cookies + expirations = self._expirations + for (domain, name), when in expirations.items(): + if when <= now: + cookies[domain].pop(name, None) + to_del.append((domain, name)) + self._host_only_cookies.discard((domain, name)) + else: + next_expiration = min(next_expiration, when) + for key in to_del: + del expirations[key] + + self._next_expiration = ceil(next_expiration) + + def _expire_cookie(self, when: float, domain: str, name: str) -> None: + iwhen = int(when) + self._next_expiration = min(self._next_expiration, iwhen) + self._expirations[(domain, name)] = iwhen + + def update_cookies(self, + cookies: LooseCookies, + response_url: URL=URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() # type: ignore + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp = SimpleCookie() + tmp[name] = cookie # type: ignore + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain.endswith('.'): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1:path.rfind("/")] + cookie["path"] = path + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + self._expire_cookie(self._loop.time() + delta_seconds, + domain, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time.timestamp(), + domain, name) + else: + cookie["expires"] = "" + + self._cookies[domain][name] = cookie + + self._do_expiration() + + def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]': + """Returns this jar's cookies filtered by their attributes.""" + self._do_expiration() + request_url = URL(request_url) + filtered = SimpleCookie() + hostname = request_url.raw_host or "" + is_not_secure = request_url.scheme not in ("https", "wss") + + for cookie in self: + name = cookie.key + domain = cookie["domain"] + + # Send shared cookies + if not domain: + filtered[name] = cookie.value + continue + + if not self._unsafe and is_ip_address(hostname): + continue + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + elif not self._is_domain_match(domain, hostname): + continue + + if not self._is_path_match(request_url.path, cookie["path"]): + continue + + if is_not_secure and cookie["secure"]: + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + 
filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[:-len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @staticmethod + def _is_path_match(req_path: str, cookie_path: str) -> bool: + """Implements path matching adhering to RFC 6265.""" + if not req_path.startswith("/"): + req_path = "/" + + if req_path == cookie_path: + return True + + if not req_path.startswith(cookie_path): + return False + + if cookie_path.endswith("/"): + return True + + non_matching = req_path[len(cookie_path):] + + return non_matching.startswith("/") + + @classmethod + def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = [ + int(s) for s in time_match.groups()] + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return datetime.datetime(year, month, day, + hour, minute, second, + tzinfo=datetime.timezone.utc) + + +class DummyCookieJar(AbstractCookieJar): + """Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. + + """ + + def __init__(self, *, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> 'Iterator[Morsel[str]]': + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self) -> None: + pass + + def update_cookies(self, + cookies: LooseCookies, + response_url: URL=URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]': + return SimpleCookie() diff --git a/venv/lib/python3.7/site-packages/aiohttp/formdata.py b/venv/lib/python3.7/site-packages/aiohttp/formdata.py new file mode 100644 index 0000000..b4ffa04 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/formdata.py @@ -0,0 +1,150 @@ +import io +from typing import Any, Iterable, List, Optional # noqa +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . 
import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ('FormData',) + + +class FormData: + """Helper class for multipart/form-data and + application/x-www-form-urlencoded body generation.""" + + def __init__(self, fields: + Iterable[Any]=(), + quote_fields: bool=True, + charset: Optional[str]=None) -> None: + self._writer = multipart.MultipartWriter('form-data') + self._fields = [] # type: List[Any] + self._is_multipart = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field(self, name: str, value: Any, *, + content_type: Optional[str]=None, + filename: Optional[str]=None, + content_transfer_encoding: Optional[str]=None) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + if filename is None and content_transfer_encoding is None: + filename = name + + type_options = MultiDict({'name': name}) + if filename is not None and not isinstance(filename, str): + raise TypeError('filename must be an instance of str. ' + 'Got: %s' % filename) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options['filename'] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError('content_type must be an instance of str. ' + 'Got: %s' % content_type) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError('content_transfer_encoding must be an instance' + ' of str. 
Got: %s' % content_transfer_encoding) + headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields: Any) -> None: + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, 'unknown') + self.add_field(k, rec) # type: ignore + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) # type: ignore + + else: + raise TypeError('Only io.IOBase, multidict and (name, file) ' + 'pairs allowed, use .add_field() for passing ' + 'more complex parameters, got {!r}' + .format(rec)) + + def _gen_form_urlencoded(self) -> payload.BytesPayload: + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options['name'], value)) + + charset = self._charset if self._charset is not None else 'utf-8' + + if charset == 'utf-8': + content_type = 'application/x-www-form-urlencoded' + else: + content_type = ('application/x-www-form-urlencoded; ' + 'charset=%s' % charset) + + return payload.BytesPayload( + urlencode(data, doseq=True, encoding=charset).encode(), + content_type=content_type) + + def _gen_form_data(self) -> multipart.MultipartWriter: + """Encode a list of fields using the multipart/form-data MIME format""" + for dispparams, headers, value in self._fields: + try: + if hdrs.CONTENT_TYPE in headers: + part = payload.get_payload( + value, content_type=headers[hdrs.CONTENT_TYPE], + headers=headers, encoding=self._charset) + else: + part = payload.get_payload( + value, headers=headers, encoding=self._charset) + except Exception as exc: + raise TypeError( + 'Can not serialize value type: %r\n ' + 'headers: %r\n value: %r' % ( + type(value), headers, value)) from exc + + if dispparams: + part.set_content_disposition( + 'form-data', quote_fields=self._quote_fields, **dispparams + ) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + assert part.headers is not None + part.headers.popall(hdrs.CONTENT_LENGTH, None) + + self._writer.append_payload(part) + + return self._writer + + def __call__(self) -> Payload: + if self._is_multipart: + return self._gen_form_data() + else: + return self._gen_form_urlencoded() diff --git a/venv/lib/python3.7/site-packages/aiohttp/frozenlist.py b/venv/lib/python3.7/site-packages/aiohttp/frozenlist.py new file mode 100644 index 0000000..2aaea64 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/frozenlist.py @@ -0,0 +1,72 @@ +from collections.abc import MutableSequence +from functools import total_ordering + +from .helpers import NO_EXTENSIONS + + +@total_ordering +class FrozenList(MutableSequence): + + __slots__ = ('_frozen', '_items') + + def __init__(self, items=None): + self._frozen = False + if items is not None: + items = list(items) + else: + items = [] + self._items = items + + @property + def frozen(self): + return self._frozen + + def freeze(self): + self._frozen = True + + def __getitem__(self, index): + return self._items[index] + + def __setitem__(self, index, value): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + self._items[index] = value + + def __delitem__(self, index): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + del self._items[index] + + def __len__(self): + return 
self._items.__len__() + + def __iter__(self): + return self._items.__iter__() + + def __reversed__(self): + return self._items.__reversed__() + + def __eq__(self, other): + return list(self) == other + + def __le__(self, other): + return list(self) <= other + + def insert(self, pos, item): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + self._items.insert(pos, item) + + def __repr__(self): + return '<FrozenList(frozen={}, {!r})>'.format(self._frozen, + self._items) + + +PyFrozenList = FrozenList + +try: + from aiohttp._frozenlist import FrozenList as CFrozenList  # type: ignore + if not NO_EXTENSIONS: + FrozenList = CFrozenList  # type: ignore +except ImportError:  # pragma: no cover + pass diff --git a/venv/lib/python3.7/site-packages/aiohttp/frozenlist.pyi b/venv/lib/python3.7/site-packages/aiohttp/frozenlist.pyi new file mode 100644 index 0000000..61a0478 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/frozenlist.pyi @@ -0,0 +1,54 @@ +from typing import (Generic, Iterable, Iterator, List, MutableSequence, + Optional, TypeVar, Union, overload) + +_T = TypeVar('_T') +_Arg = Union[List[_T], Iterable[_T]] + + +class FrozenList(MutableSequence[_T], Generic[_T]): + + def __init__(self, items: Optional[_Arg[_T]]=None) -> None: ... + + @property + def frozen(self) -> bool: ... + + def freeze(self) -> None: ... + + @overload + def __getitem__(self, i: int) -> _T: ... + + @overload + def __getitem__(self, s: slice) -> FrozenList[_T]: ... + + @overload + def __setitem__(self, i: int, o: _T) -> None: ... + + @overload + def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... + + @overload + def __delitem__(self, i: int) -> None: ... + + @overload + def __delitem__(self, i: slice) -> None: ... + + def __len__(self) -> int: ... + + def __iter__(self) -> Iterator[_T]: ... + + def __reversed__(self) -> Iterator[_T]: ... + + def __eq__(self, other: object) -> bool: ... + def __le__(self, other: FrozenList[_T]) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: FrozenList[_T]) -> bool: ... + def __ge__(self, other: FrozenList[_T]) -> bool: ... + def __gt__(self, other: FrozenList[_T]) -> bool: ... + + def insert(self, pos: int, item: _T) -> None: ... + + def __repr__(self) -> str: ...
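FrozenList in brief: it is an ordinary MutableSequence until freeze() is called, after which every mutating method raises RuntimeError while reads keep working. A minimal sketch of that behaviour, assuming only the pure-Python aiohttp.frozenlist module shown above:

from aiohttp.frozenlist import FrozenList

fl = FrozenList([1, 2])
fl.append(3)        # MutableSequence.append routes through insert(), allowed while unfrozen
fl.freeze()         # sets the internal _frozen flag
try:
    fl.append(4)    # any mutation now raises
except RuntimeError:
    pass
assert fl.frozen and list(fl) == [1, 2, 3]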
+ + +# types for C accelerators are the same +CFrozenList = PyFrozenList = FrozenList diff --git a/venv/lib/python3.7/site-packages/aiohttp/hdrs.py b/venv/lib/python3.7/site-packages/aiohttp/hdrs.py new file mode 100644 index 0000000..c11a9d3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/hdrs.py @@ -0,0 +1,100 @@ +"""HTTP Headers constants.""" + +# After changing the file content call ./tools/gen.py +# to regenerate the headers parser + +from multidict import istr + +METH_ANY = '*' +METH_CONNECT = 'CONNECT' +METH_HEAD = 'HEAD' +METH_GET = 'GET' +METH_DELETE = 'DELETE' +METH_OPTIONS = 'OPTIONS' +METH_PATCH = 'PATCH' +METH_POST = 'POST' +METH_PUT = 'PUT' +METH_TRACE = 'TRACE' + +METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE, + METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE} + + +ACCEPT = istr('Accept') +ACCEPT_CHARSET = istr('Accept-Charset') +ACCEPT_ENCODING = istr('Accept-Encoding') +ACCEPT_LANGUAGE = istr('Accept-Language') +ACCEPT_RANGES = istr('Accept-Ranges') +ACCESS_CONTROL_MAX_AGE = istr('Access-Control-Max-Age') +ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('Access-Control-Allow-Credentials') +ACCESS_CONTROL_ALLOW_HEADERS = istr('Access-Control-Allow-Headers') +ACCESS_CONTROL_ALLOW_METHODS = istr('Access-Control-Allow-Methods') +ACCESS_CONTROL_ALLOW_ORIGIN = istr('Access-Control-Allow-Origin') +ACCESS_CONTROL_EXPOSE_HEADERS = istr('Access-Control-Expose-Headers') +ACCESS_CONTROL_REQUEST_HEADERS = istr('Access-Control-Request-Headers') +ACCESS_CONTROL_REQUEST_METHOD = istr('Access-Control-Request-Method') +AGE = istr('Age') +ALLOW = istr('Allow') +AUTHORIZATION = istr('Authorization') +CACHE_CONTROL = istr('Cache-Control') +CONNECTION = istr('Connection') +CONTENT_DISPOSITION = istr('Content-Disposition') +CONTENT_ENCODING = istr('Content-Encoding') +CONTENT_LANGUAGE = istr('Content-Language') +CONTENT_LENGTH = istr('Content-Length') +CONTENT_LOCATION = istr('Content-Location') +CONTENT_MD5 = istr('Content-MD5') +CONTENT_RANGE = istr('Content-Range') +CONTENT_TRANSFER_ENCODING = istr('Content-Transfer-Encoding') +CONTENT_TYPE = istr('Content-Type') +COOKIE = istr('Cookie') +DATE = istr('Date') +DESTINATION = istr('Destination') +DIGEST = istr('Digest') +ETAG = istr('Etag') +EXPECT = istr('Expect') +EXPIRES = istr('Expires') +FORWARDED = istr('Forwarded') +FROM = istr('From') +HOST = istr('Host') +IF_MATCH = istr('If-Match') +IF_MODIFIED_SINCE = istr('If-Modified-Since') +IF_NONE_MATCH = istr('If-None-Match') +IF_RANGE = istr('If-Range') +IF_UNMODIFIED_SINCE = istr('If-Unmodified-Since') +KEEP_ALIVE = istr('Keep-Alive') +LAST_EVENT_ID = istr('Last-Event-ID') +LAST_MODIFIED = istr('Last-Modified') +LINK = istr('Link') +LOCATION = istr('Location') +MAX_FORWARDS = istr('Max-Forwards') +ORIGIN = istr('Origin') +PRAGMA = istr('Pragma') +PROXY_AUTHENTICATE = istr('Proxy-Authenticate') +PROXY_AUTHORIZATION = istr('Proxy-Authorization') +RANGE = istr('Range') +REFERER = istr('Referer') +RETRY_AFTER = istr('Retry-After') +SEC_WEBSOCKET_ACCEPT = istr('Sec-WebSocket-Accept') +SEC_WEBSOCKET_VERSION = istr('Sec-WebSocket-Version') +SEC_WEBSOCKET_PROTOCOL = istr('Sec-WebSocket-Protocol') +SEC_WEBSOCKET_EXTENSIONS = istr('Sec-WebSocket-Extensions') +SEC_WEBSOCKET_KEY = istr('Sec-WebSocket-Key') +SEC_WEBSOCKET_KEY1 = istr('Sec-WebSocket-Key1') +SERVER = istr('Server') +SET_COOKIE = istr('Set-Cookie') +TE = istr('TE') +TRAILER = istr('Trailer') +TRANSFER_ENCODING = istr('Transfer-Encoding') +UPGRADE = istr('Upgrade') +WEBSOCKET = istr('WebSocket') +URI = istr('URI') 
+USER_AGENT = istr('User-Agent') +VARY = istr('Vary') +VIA = istr('Via') +WANT_DIGEST = istr('Want-Digest') +WARNING = istr('Warning') +WWW_AUTHENTICATE = istr('WWW-Authenticate') +X_FORWARDED_FOR = istr('X-Forwarded-For') +X_FORWARDED_HOST = istr('X-Forwarded-Host') +X_FORWARDED_PROTO = istr('X-Forwarded-Proto') diff --git a/venv/lib/python3.7/site-packages/aiohttp/helpers.py b/venv/lib/python3.7/site-packages/aiohttp/helpers.py new file mode 100644 index 0000000..cbfbb1e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/helpers.py @@ -0,0 +1,704 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import cgi +import functools +import inspect +import netrc +import os +import platform +import re +import sys +import time +import warnings +import weakref +from collections import namedtuple +from contextlib import suppress +from math import ceil +from pathlib import Path +from types import TracebackType +from typing import ( # noqa + Any, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from urllib.parse import quote +from urllib.request import getproxies + +import async_timeout +import attr +from multidict import MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .log import client_logger, internal_logger +from .typedefs import PathLike # noqa + +__all__ = ('BasicAuth', 'ChainMapProxy') + +PY_36 = sys.version_info >= (3, 6) +PY_37 = sys.version_info >= (3, 7) + +if not PY_37: + import idna_ssl + idna_ssl.patch_match_hostname() + +try: + from typing import ContextManager +except ImportError: + from typing_extensions import ContextManager + + +def all_tasks( + loop: Optional[asyncio.AbstractEventLoop] = None +) -> Set['asyncio.Task[Any]']: + tasks = list(asyncio.Task.all_tasks(loop)) # type: ignore + return {t for t in tasks if not t.done()} + + +if PY_37: + all_tasks = getattr(asyncio, 'all_tasks') # noqa + + +_T = TypeVar('_T') + + +sentinel = object() # type: Any +NO_EXTENSIONS = bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')) # type: bool + +# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr +# for compatibility with older versions +DEBUG = (getattr(sys.flags, 'dev_mode', False) or + (not sys.flags.ignore_environment and + bool(os.environ.get('PYTHONASYNCIODEBUG')))) # type: bool + + +CHAR = set(chr(i) for i in range(0, 128)) +CTL = set(chr(i) for i in range(0, 32)) | {chr(127), } +SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']', + '?', '=', '{', '}', ' ', chr(9)} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +coroutines = asyncio.coroutines +old_debug = coroutines._DEBUG # type: ignore + +# prevent "coroutine noop was never awaited" warning. 
+coroutines._DEBUG = False # type: ignore + + +@asyncio.coroutine +def noop(*args, **kwargs): # type: ignore + return # type: ignore + + +async def noop2(*args: Any, **kwargs: Any) -> None: + return + + +coroutines._DEBUG = old_debug # type: ignore + + +class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])): + """Http basic authentication helper.""" + + def __new__(cls, login: str, + password: str='', + encoding: str='latin1') -> 'BasicAuth': + if login is None: + raise ValueError('None is not allowed as login value') + + if password is None: + raise ValueError('None is not allowed as password value') + + if ':' in login: + raise ValueError( + 'A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth': + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(' ', 1) + except ValueError: + raise ValueError('Could not parse authorization header.') + + if auth_type.lower() != 'basic': + raise ValueError('Unknown authorization method %s' % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode('ascii'), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError('Invalid base64 encoding.') + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. + username, password = decoded.split(':', 1) + except ValueError: + raise ValueError('Invalid credentials.') + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, + *, encoding: str='latin1') -> Optional['BasicAuth']: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + if url.user is None: + return None + return cls(url.user, url.password or '', encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) + return 'Basic %s' % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + auth = BasicAuth.from_url(url) + if auth is None: + return url, None + else: + return url.with_user(None), auth + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Attempt to load the netrc file from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. + """ + netrc_env = os.environ.get('NETRC') + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug('Could not resolve home directory when ' + 'trying to look for .netrc file: %s', e) + return None + + netrc_path = home_dir / ( + '_netrc' if platform.system() == 'Windows' else '.netrc') + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning('Could not parse .netrc file: %s', e) + except OSError as e: + # we couldn't read the file (doesn't exist, permissions, etc.) 
+ if netrc_env or netrc_path.is_file(): + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning('Could not read .netrc file: %s', e) + + return None + + +@attr.s(frozen=True, slots=True) +class ProxyInfo: + proxy = attr.ib(type=URL) + proxy_auth = attr.ib(type=Optional[BasicAuth]) + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = {k: URL(v) for k, v in getproxies().items() + if k in ('http', 'https')} + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme == 'https': + client_logger.warning( + "HTTPS proxies %s are not supported, ignoring", proxy) + continue + if netrc_obj and auth is None: + auth_from_netrc = None + if proxy.host is not None: + auth_from_netrc = netrc_obj.authenticators(proxy.host) + if auth_from_netrc is not None: + # auth_from_netrc is a (`user`, `account`, `password`) tuple, + # `user` and `account` both can be username, + # if `user` is None, use `account` + *logins, password = auth_from_netrc + login = logins[0] if logins[0] else logins[-1] + auth = BasicAuth(cast(str, login), cast(str, password)) + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def current_task(loop: Optional[asyncio.AbstractEventLoop]=None) -> asyncio.Task: # type: ignore # noqa # Return type is intentionally Generic here + if PY_37: + return asyncio.current_task(loop=loop) # type: ignore + else: + return asyncio.Task.current_task(loop=loop) # type: ignore + + +def get_running_loop( + loop: Optional[asyncio.AbstractEventLoop]=None +) -> asyncio.AbstractEventLoop: + if loop is None: + loop = asyncio.get_event_loop() + if not loop.is_running(): + warnings.warn("The object should be created from async function", + DeprecationWarning, stacklevel=3) + if loop.get_debug(): + internal_logger.warning( + "The object should be created from async function", + stack_info=True) + return loop + + +def isasyncgenfunction(obj: Any) -> bool: + func = getattr(inspect, 'isasyncgenfunction', None) + if func is not None: + return func(obj) + else: + return False + + +@attr.s(frozen=True, slots=True) +class MimeType: + type = attr.ib(type=str) + subtype = attr.ib(type=str) + suffix = attr.ib(type=str) + parameters = attr.ib(type=MultiDictProxy) # type: MultiDictProxy[str] + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. 
+ + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType(type='', subtype='', suffix='', + parameters=MultiDictProxy(MultiDict())) + + parts = mimetype.split(';') + params = MultiDict() # type: MultiDict[str] + for item in parts[1:]: + if not item: + continue + key, value = cast(Tuple[str, str], + item.split('=', 1) if '=' in item else (item, '')) + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == '*': + fulltype = '*/*' + + mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1)) + if '/' in fulltype else (fulltype, '')) + stype, suffix = (cast(Tuple[str, str], stype.split('+', 1)) + if '+' in stype else (stype, '')) + + return MimeType(type=mtype, subtype=stype, suffix=suffix, + parameters=MultiDictProxy(params)) + + +def guess_filename(obj: Any, default: Optional[str]=None) -> Optional[str]: + name = getattr(obj, 'name', None) + if name and isinstance(name, str) and name[0] != '<' and name[-1] != '>': + return Path(name).name + return default + + +def content_disposition_header(disptype: str, + quote_fields: bool=True, + **params: str) -> str: + """Sets ``Content-Disposition`` header. + + disptype is a disposition type: inline, attachment, form-data. + Should be valid extension token (see RFC 2183) + + params is a dict with disposition params. + """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError('bad content disposition type {!r}' + ''.format(disptype)) + + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError('bad content disposition parameter' + ' {!r}={!r}'.format(key, val)) + qval = quote(val, '') if quote_fields else val + lparams.append((key, '"%s"' % qval)) + if key == 'filename': + lparams.append(('filename*', "utf-8''" + qval)) + sparams = '; '.join('='.join(pair) for pair in lparams) + value = '; '.join((value, sparams)) + return value + + +class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
+ + """ + + def __init__(self, wrapped: Callable[..., Any]) -> None: + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + self.name = wrapped.__name__ + + def __get__(self, inst: Any, owner: Any) -> Any: + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst: Any, value: Any) -> None: + raise AttributeError("reified property is read-only") + + +reify_py = reify + +try: + from ._helpers import reify as reify_c + if not NO_EXTENSIONS: + reify = reify_c # type: ignore +except ImportError: + pass + +_ipv4_pattern = (r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}' + r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$') +_ipv6_pattern = ( + r'^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}' + r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)' + r'((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})' + r'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}' + r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}' + r'[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)' + r'(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}' + r':|:(:[A-F0-9]{1,4}){7})$') +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii')) +_ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE) + + +def _is_ip_address( + regex: Pattern[str], regexb: Pattern[bytes], + host: Optional[Union[str, bytes]])-> bool: + if host is None: + return False + if isinstance(host, str): + return bool(regex.match(host)) + elif isinstance(host, (bytes, bytearray, memoryview)): + return bool(regexb.match(host)) + else: + raise TypeError("{} [{}] is not a str or bytes" + .format(host, type(host))) + + +is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) +is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + + +def is_ip_address( + host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + return is_ipv4_address(host) or is_ipv6_address(host) + + +_cached_current_datetime = None +_cached_formatted_datetime = None + + +def rfc822_formatted_time() -> str: + global _cached_current_datetime + global _cached_formatted_datetime + + now = int(time.time()) + if now != _cached_current_datetime: + # Weekday and month names for HTTP date/time formatting; + # always English! + # Tuples are constants stored in codeobject! 
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ("", # Dummy so we can use 1-based month numbers + "Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec") + + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now) # type: ignore # noqa + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], day, _monthname[month], year, hh, mm, ss + ) + _cached_current_datetime = now + return _cached_formatted_datetime # type: ignore + + +def _weakref_handle(info): # type: ignore + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): # type: ignore + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if ceil_timeout: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + + +def call_later(cb, timeout, loop): # type: ignore + if timeout is not None and timeout > 0: + when = ceil(loop.time() + timeout) + return loop.call_at(when, cb) + + +class TimeoutHandle: + """ Timeout handle """ + + def __init__(self, + loop: asyncio.AbstractEventLoop, + timeout: Optional[float]) -> None: + self._timeout = timeout + self._loop = loop + self._callbacks = [] # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] # noqa + + def register(self, callback: Callable[..., None], + *args: Any, **kwargs: Any) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.Handle]: + if self._timeout is not None and self._timeout > 0: + at = ceil(self._loop.time() + self._timeout) + return self._loop.call_at(at, self.__call__) + else: + return None + + def timer(self) -> 'BaseTimerContext': + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager['BaseTimerContext']): + pass + + +class TimerNoop(BaseTimerContext): + + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return False + + +class TimerContext(BaseTimerContext): + """ Low resolution timeout context manager """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks = [] # type: List[asyncio.Task[Any]] + self._cancelled = False + + def __enter__(self) -> BaseTimerContext: + task = current_task(loop=self._loop) + + if task is None: + raise RuntimeError('Timeout context manager should be used ' + 'inside a task') + + if self._cancelled: + task.cancel() + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + if self._tasks: + self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + raise asyncio.TimeoutError from None + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +class 
CeilTimeout(async_timeout.timeout): + + def __enter__(self) -> async_timeout.timeout: + if self._timeout is not None: + self._task = current_task(loop=self._loop) + if self._task is None: + raise RuntimeError( + 'Timeout context manager should be used inside a task') + self._cancel_handler = self._loop.call_at( + ceil(self._loop.time() + self._timeout), self._cancel_task) + return self + + +class HeadersMixin: + + ATTRS = frozenset([ + '_content_type', '_content_dict', '_stored_content_type']) + + _content_type = None # type: Optional[str] + _content_dict = None # type: Optional[Dict[str, str]] + _stored_content_type = sentinel + + def _parse_content_type(self, raw: str) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = 'application/octet-stream' + self._content_dict = {} + else: + self._content_type, self._content_dict = cgi.parse_header(raw) + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type # type: ignore + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get('charset') # type: ignore + + @property + def content_length(self) -> Optional[int]: + """The value of Content-Length HTTP header.""" + content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore + + if content_length is not None: + return int(content_length) + else: + return None + + +def set_result(fut: 'asyncio.Future[_T]', result: _T) -> None: + if not fut.done(): + fut.set_result(result) + + +def set_exception(fut: 'asyncio.Future[_T]', exc: BaseException) -> None: + if not fut.done(): + fut.set_exception(exc) + + +class ChainMapProxy(Mapping[str, Any]): + __slots__ = ('_maps',) + + def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None: + self._maps = tuple(maps) + + def __init_subclass__(cls) -> None: + raise TypeError("Inheritance class {} from ChainMapProxy " + "is forbidden".format(cls.__name__)) + + def __getitem__(self, key: str) -> Any: + for mapping in self._maps: + try: + return mapping[key] + except KeyError: + pass + raise KeyError(key) + + def get(self, key: str, default: Any=None) -> Any: + return self[key] if key in self else default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) # type: ignore + + def __iter__(self) -> Iterator[str]: + d = {} # type: Dict[str, Any] + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return 'ChainMapProxy({})'.format(content) diff --git a/venv/lib/python3.7/site-packages/aiohttp/http.py b/venv/lib/python3.7/site-packages/aiohttp/http.py new file mode 100644 index 0000000..7536216 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/http.py @@ -0,0 +1,56 @@ +import http.server +import sys +from typing import Mapping, Tuple # noqa + +from . 
import __version__ +from .http_exceptions import HttpProcessingError +from .http_parser import ( + HeadersParser, + HttpParser, + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSCloseCode, + WSMessage, + WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .http_writer import ( + HttpVersion, + HttpVersion10, + HttpVersion11, + StreamWriter, +) + +__all__ = ( + 'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE', + + # .http_writer + 'StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11', + + # .http_parser + 'HeadersParser', 'HttpParser', + 'HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage', + + # .http_websocket + 'WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY', + 'WebSocketReader', 'WebSocketWriter', 'ws_ext_gen', 'ws_ext_parse', + 'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode', +) + + +SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format( + sys.version_info, __version__) # type: str + +RESPONSES = http.server.BaseHTTPRequestHandler.responses # type: Mapping[int, Tuple[str, str]] # noqa diff --git a/venv/lib/python3.7/site-packages/aiohttp/http_exceptions.py b/venv/lib/python3.7/site-packages/aiohttp/http_exceptions.py new file mode 100644 index 0000000..d45bd77 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1,98 @@ +"""Low-level http related exceptions.""" + + +from typing import Optional, Union + +from .typedefs import _CIMultiDict + +__all__ = ('HttpProcessingError',) + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + code: HTTP Error code. + message: (optional) Error message. + headers: (optional) Headers to be sent in response, a list of pairs + """ + + code = 0 + message = '' + headers = None + + def __init__(self, *, + code: Optional[int]=None, + message: str='', + headers: Optional[_CIMultiDict]=None) -> None: + if code is not None: + self.code = code + self.headers = headers + self.message = message + + super().__init__("%s, message='%s'" % (self.code, message)) + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = 'Bad Request' + + def __init__(self, message: str, *, + headers: Optional[_CIMultiDict]=None) -> None: + super().__init__(message=message, headers=headers) + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = 'Bad Request' + + +class PayloadEncodingError(BadHttpMessage): + """Base class for payload errors""" + + +class ContentEncodingError(PayloadEncodingError): + """Content encoding error.""" + + +class TransferEncodingError(PayloadEncodingError): + """transfer encoding error.""" + + +class ContentLengthError(PayloadEncodingError): + """Not enough data for satisfy content length header.""" + + +class LineTooLong(BadHttpMessage): + + def __init__(self, line: str, + limit: str='Unknown', + actual_size: str='Unknown') -> None: + super().__init__( + "Got more than %s bytes (%s) when reading %s." 
% ( + limit, actual_size, line)) + + +class InvalidHeader(BadHttpMessage): + + def __init__(self, hdr: Union[bytes, str]) -> None: + if isinstance(hdr, bytes): + hdr = hdr.decode('utf-8', 'surrogateescape') + super().__init__('Invalid HTTP Header: {}'.format(hdr)) + self.hdr = hdr + + +class BadStatusLine(BadHttpMessage): + + def __init__(self, line: str='') -> None: + if not line: + line = repr(line) + self.args = line, + self.line = line + + +class InvalidURLError(BadHttpMessage): + pass diff --git a/venv/lib/python3.7/site-packages/aiohttp/http_parser.py b/venv/lib/python3.7/site-packages/aiohttp/http_parser.py new file mode 100644 index 0000000..9e22d10 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/http_parser.py @@ -0,0 +1,764 @@ +import abc +import asyncio +import collections +import re +import string +import zlib +from enum import IntEnum +from typing import Any, List, Optional, Tuple, Type, Union # noqa + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . import hdrs +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS, BaseTimerContext +from .http_exceptions import ( + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .log import internal_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import RawHeaders + +try: + import brotli + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + + +__all__ = ( + 'HeadersParser', 'HttpParser', 'HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +ASCIISET = set(string.printable) + +# See https://tools.ietf.org/html/rfc7230#section-3.1.1 +# and https://tools.ietf.org/html/rfc7230#appendix-B +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") +VERSRE = re.compile(r'HTTP/(\d+).(\d+)') +HDRRE = re.compile(rb'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]') + +RawRequestMessage = collections.namedtuple( + 'RawRequestMessage', + ['method', 'path', 'version', 'headers', 'raw_headers', + 'should_close', 'compression', 'upgrade', 'chunked', 'url']) + +RawResponseMessage = collections.namedtuple( + 'RawResponseMessage', + ['version', 'code', 'reason', 'headers', 'raw_headers', + 'should_close', 'compression', 'upgrade', 'chunked']) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__(self, + max_line_size: int=8190, + max_headers: int=32768, + max_field_size: int=8190) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + + def parse_headers( + self, + lines: List[bytes] + ) -> Tuple['CIMultiDictProxy[str]', RawHeaders]: + headers = CIMultiDict() # type: CIMultiDict[str] + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
+ try: + bname, bvalue = line.split(b':', 1) + except ValueError: + raise InvalidHeader(line) from None + + bname = bname.strip(b' \t') + bvalue = bvalue.lstrip() + if HDRRE.search(bname): + raise InvalidHeader(bname) + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "xmlcharrefreplace")), + str(self.max_field_size), + str(len(bname))) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = line and line[0] in (32, 9) # (' ', '\t') + + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + 'request header field {}'.format( + bname.decode("utf8", "xmlcharrefreplace")), + str(self.max_field_size), + str(header_length)) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b'' + break + bvalue = b''.join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + 'request header field {}'.format( + bname.decode("utf8", "xmlcharrefreplace")), + str(self.max_field_size), + str(header_length)) + + bvalue = bvalue.strip() + name = bname.decode('utf-8', 'surrogateescape') + value = bvalue.decode('utf-8', 'surrogateescape') + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +class HttpParser(abc.ABC): + + def __init__(self, protocol: Optional[BaseProtocol]=None, + loop: Optional[asyncio.AbstractEventLoop]=None, + max_line_size: int=8190, + max_headers: int=32768, + max_field_size: int=8190, + timer: Optional[BaseTimerContext]=None, + code: Optional[int]=None, + method: Optional[str]=None, + readall: bool=False, + payload_exception: Optional[Type[BaseException]]=None, + response_with_body: bool=True, + read_until_eof: bool=False, + auto_decompress: bool=True) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.readall = readall + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines = [] # type: List[bytes] + self._tail = b'' + self._upgraded = False + self._payload = None + self._payload_parser = None # type: Optional[HttpPayloadParser] + self._auto_decompress = auto_decompress + self._headers_parser = HeadersParser(max_line_size, + max_headers, + max_field_size) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> Any: + pass + + def feed_eof(self) -> Any: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != '\r\n': + self._lines.append(b'') + try: + return self.parse_message(self._lines) + except Exception: + return None + + def feed_data( + self, + data: bytes, + SEP: bytes=b'\r\n', + EMPTY: bytes=b'', + CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH, + METH_CONNECT: str=hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr=hdrs.SEC_WEBSOCKET_KEY1 + ) -> Tuple[List[Any], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = 
self._tail + data, b'' + + data_len = len(data) + start_pos = 0 + loop = self.loop + + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + 2 + continue + + if pos >= start_pos: + # line found + self._lines.append(data[start_pos:pos]) + start_pos = pos + 2 + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg = self.parse_message(self._lines) + finally: + self._lines.clear() + + # payload length + length = msg.headers.get(CONTENT_LENGTH) + if length is not None: + try: + length = int(length) + except ValueError: + raise InvalidHeader(CONTENT_LENGTH) + if length < 0: + raise InvalidHeader(CONTENT_LENGTH) + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade + + method = getattr(msg, 'method', self.method) + + assert self.protocol is not None + # calculate payload + if ((length is not None and length > 0) or + msg.chunked and not msg.upgrade): + payload = StreamReader( + self.protocol, timer=self.timer, loop=loop) + payload_parser = HttpPayloadParser( + payload, length=length, + chunked=msg.chunked, method=method, + compression=msg.compression, + code=self.code, readall=self.readall, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + payload = StreamReader( + self.protocol, timer=self.timer, loop=loop) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, method=msg.method, + compression=msg.compression, readall=True, + auto_decompress=self._auto_decompress) + else: + if (getattr(msg, 'code', 100) >= 199 and + length is None and self.read_until_eof): + payload = StreamReader( + self.protocol, timer=self.timer, loop=loop) + payload_parser = HttpPayloadParser( + payload, length=length, + chunked=msg.chunked, method=method, + compression=msg.compression, + code=self.code, readall=True, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress) + if not payload_parser.done: + self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD # type: ignore + + messages.append((msg, payload)) + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data( + data[start_pos:]) + except BaseException as exc: + if self.payload_exception is not None: + self._payload_parser.payload.set_exception( + self.payload_exception(str(exc))) + else: + self._payload_parser.payload.set_exception(exc) + + eof = True + data = b'' + + if eof: + start_pos = 0 + data_len = len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, + lines: List[bytes] + ) -> Tuple['CIMultiDictProxy[str]', + RawHeaders, + Optional[bool], + Optional[str], + bool, + bool]: + """Parses RFC 5322 headers from a stream. 
+ + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == 'close': + close_conn = True + elif v == 'keep-alive': + close_conn = False + elif v == 'upgrade': + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te and 'chunked' in te.lower(): + chunked = True + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + +class HttpRequestParser(HttpParser): + """Read request status line. Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. + """ + + def parse_message(self, lines: List[bytes]) -> Any: + # request line + line = lines[0].decode('utf-8', 'surrogateescape') + try: + method, path, version = line.split(None, 2) + except ValueError: + raise BadStatusLine(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + 'Status line is too long', + str(self.max_line_size), + str(len(path))) + + # method + if not METHRE.match(method): + raise BadStatusLine(method) + + # version + try: + if version.startswith('HTTP/'): + n1, n2 = version[5:].split('.', 1) + version_o = HttpVersion(int(n1), int(n2)) + else: + raise BadStatusLine(version) + except Exception: + raise BadStatusLine(version) + + # read headers + (headers, raw_headers, + close, compression, upgrade, chunked) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, path, version_o, headers, raw_headers, + close, compression, upgrade, chunked, URL(path)) + + +class HttpResponseParser(HttpParser): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. 
+ Returns RawResponseMessage""" + + def parse_message(self, lines: List[bytes]) -> Any: + line = lines[0].decode('utf-8', 'surrogateescape') + try: + version, status = line.split(None, 1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(None, 1) + except ValueError: + reason = '' + + if len(reason) > self.max_line_size: + raise LineTooLong( + 'Status line is too long', + str(self.max_line_size), + str(len(reason))) + + # version + match = VERSRE.match(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit number + try: + status_i = int(status) + except ValueError: + raise BadStatusLine(line) from None + + if status_i > 999: + raise BadStatusLine(line) + + # read headers + (headers, raw_headers, + close, compression, upgrade, chunked) = self.parse_headers(lines) + + if close is None: + close = version_o <= HttpVersion10 + + return RawResponseMessage( + version_o, status_i, reason.strip(), + headers, raw_headers, close, compression, upgrade, chunked) + + +class HttpPayloadParser: + + def __init__(self, payload: StreamReader, + length: Optional[int]=None, + chunked: bool=False, + compression: Optional[str]=None, + code: Optional[int]=None, + method: Optional[str]=None, + readall: bool=False, + response_with_body: bool=True, + auto_decompress: bool=True) -> None: + self._length = 0 + self._type = ParseState.PARSE_NONE + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b'' + self._auto_decompress = auto_decompress + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload = DeflateBuffer(payload, compression) # type: Union[StreamReader, DeflateBuffer] # noqa + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + else: + if readall and code != 204: + self._type = ParseState.PARSE_UNTIL_EOF + elif method in ('PUT', 'POST'): + internal_logger.warning( # pragma: no cover + 'Content-Length or Transfer-Encoding header is required') + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + + def feed_data(self, + chunk: bytes, + SEP: bytes=b'\r\n', + CHUNK_EXT: bytes=b';') -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b'' + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, 
chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b'' + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + else: + size_b = chunk[:pos] + + try: + size = int(bytes(size_b), 16) + except ValueError: + exc = TransferEncodingError( + chunk[:pos].decode('ascii', 'surrogateescape')) + self.payload.set_exception(exc) + raise exc from None + + chunk = chunk[pos+2:] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b'' + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b'' + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if chunk[:2] == SEP: + chunk = chunk[2:] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b'' + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skiped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + if chunk[:2] == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[2:] + else: + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos+2:] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b'' + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b'' + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + if encoding == 'br': + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + 'Can not decode content-encoding: brotli (br). 
' + 'Please install `brotlipy`') + self.decompressor = brotli.Decompressor() + else: + zlib_mode = (16 + zlib.MAX_WBITS + if encoding == 'gzip' else -zlib.MAX_WBITS) + self.decompressor = zlib.decompressobj(wbits=zlib_mode) + + def set_exception(self, exc: BaseException) -> None: + self.out.set_exception(exc) + + def feed_data(self, chunk: bytes, size: int) -> None: + self.size += size + try: + chunk = self.decompressor.decompress(chunk) + except Exception: + if not self._started_decoding and self.encoding == 'deflate': + self.decompressor = zlib.decompressobj() + try: + chunk = self.decompressor.decompress(chunk) + except Exception: + raise ContentEncodingError( + 'Can not decode content-encoding: %s' % self.encoding) + else: + raise ContentEncodingError( + 'Can not decode content-encoding: %s' % self.encoding) + + if chunk: + self._started_decoding = True + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == 'deflate' and not self.decompressor.eof: + raise ContentEncodingError('deflate') + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import (HttpRequestParser, # type: ignore # noqa + HttpResponseParser, + RawRequestMessage, + RawResponseMessage) + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.7/site-packages/aiohttp/http_websocket.py b/venv/lib/python3.7/site-packages/aiohttp/http_websocket.py new file mode 100644 index 0000000..d8fc10f --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,653 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import collections +import json +import random +import re +import sys +import zlib +from enum import IntEnum +from struct import Struct +from typing import Any, Callable, List, Optional, Tuple, Union + +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS +from .log import ws_logger +from .streams import DataQueue + +__all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY', + 'WebSocketReader', 'WebSocketWriter', 'WSMessage', + 'WebSocketError', 'WSMsgType', 'WSCloseCode') + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + + +ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode} + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xa + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + + +UNPACK_LEN2 = Struct('!H').unpack_from 
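+# NOTE (editorial annotation, not part of upstream aiohttp): the Struct
+# helpers above and below pack/unpack the RFC 6455 frame-header fields:
+# '!H' is the 16-bit extended payload length used when the 7-bit length
+# flag equals 126, '!Q' the 64-bit length used when it equals 127, and the
+# '!H' close-code pair reads/writes the 2-byte status code of a CLOSE
+# frame. A quick illustration, assuming only the standard library:
+#
+#     >>> from struct import Struct
+#     >>> Struct('!H').unpack_from(b'\x01\x00')
+#     (256,)
+#     >>> Struct('!BBH').pack(0x81, 126, 256)  # FIN|TEXT, length flag 126, real length 256
+#     b'\x81~\x01\x00'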
+UNPACK_LEN3 = Struct('!Q').unpack_from +UNPACK_CLOSE_CODE = Struct('!H').unpack +PACK_LEN1 = Struct('!BB').pack +PACK_LEN2 = Struct('!BBH').pack +PACK_LEN3 = Struct('!BBQ').pack +PACK_CLOSE_CODE = Struct('!H').pack +MSG_SIZE = 2 ** 14 +DEFAULT_LIMIT = 2 ** 16 + + +_WSMessageBase = collections.namedtuple('_WSMessageBase', + ['type', 'data', 'extra']) + + +class WSMessage(_WSMessageBase): + + def json(self, *, # type: ignore + loads: Callable[[Any], Any]=json.loads) -> None: + """Return parsed JSON data. + + .. versionadded:: 0.22 + """ + return loads(self.data) + + +WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) +WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(message) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" + + +native_byteorder = sys.byteorder + + +# Used by _websocket_mask_python +_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. + + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. + + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if NO_EXTENSIONS: # pragma: no cover + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + +_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff]) + + +_WS_EXT_RE = re.compile(r'^(?:;\s*(?:' + r'(server_no_context_takeover)|' + r'(client_no_context_takeover)|' + r'(server_max_window_bits(?:=(\d+))?)|' + r'(client_max_window_bits(?:=(\d+))?)))*$') + +_WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?') + + +def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. 
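+ # NOTE (editorial annotation, not part of upstream aiohttp): on the
+ # server side an unusable server_max_window_bits offer (outside 9..15,
+ # since zlib cannot use a window of 8 bits) is not a hard error: the
+ # code below resets compress to 0 and continues with the next
+ # permessage-deflate entry instead of failing the whole handshake.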
+ # Server does not need to send max wbit to client + if match.group(4): + compress = int(match.group(4)) + # Group3 must match if group4 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # CONTINUE to next extension + if compress > 15 or compress < 9: + compress = 0 + continue + if match.group(1): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + else: + if match.group(6): + compress = int(match.group(6)) + # Group5 must match if group6 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # FAIL the parse progress + if compress > 15 or compress < 9: + raise WSHandshakeError('Invalid window size') + if match.group(2): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + # Return Fail if client side and not match + elif not isserver: + raise WSHandshakeError('Extension for deflate not supported' + + ext.group(1)) + + return compress, notakeover + + +def ws_ext_gen(compress: int=15, isserver: bool=False, + server_notakeover: bool=False) -> str: + # client_notakeover=False not used for server + # compress wbit 8 does not support in zlib + if compress < 9 or compress > 15: + raise ValueError('Compress wbits must between 9 and 15, ' + 'zlib does not support wbits=8') + enabledext = ['permessage-deflate'] + if not isserver: + enabledext.append('client_max_window_bits') + + if compress < 15: + enabledext.append('server_max_window_bits=' + str(compress)) + if server_notakeover: + enabledext.append('server_no_context_takeover') + # if client_notakeover: + # enabledext.append('client_no_context_takeover') + return '; '.join(enabledext) + + +class WSParserState(IntEnum): + READ_HEADER = 1 + READ_PAYLOAD_LENGTH = 2 + READ_PAYLOAD_MASK = 3 + READ_PAYLOAD = 4 + + +class WebSocketReader: + + def __init__(self, queue: DataQueue[WSMessage], + max_msg_size: int, compress: bool=True) -> None: + self.queue = queue + self._max_msg_size = max_msg_size + + self._exc = None # type: Optional[BaseException] + self._partial = bytearray() + self._state = WSParserState.READ_HEADER + + self._opcode = None # type: Optional[int] + self._frame_fin = False + self._frame_opcode = None # type: Optional[int] + self._frame_payload = bytearray() + + self._tail = b'' + self._has_mask = False + self._frame_mask = None # type: Optional[bytes] + self._payload_length = 0 + self._payload_length_flag = 0 + self._compressed = None # type: Optional[bool] + self._decompressobj = None # type: Any # zlib.decompressobj actually + self._compress = compress + + def feed_eof(self) -> None: + self.queue.feed_eof() + + def feed_data(self, data: bytes) -> Tuple[bool, bytes]: + if self._exc: + return True, data + + try: + return self._feed_data(data) + except Exception as exc: + self._exc = exc + self.queue.set_exception(exc) + return True, b'' + + def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + for fin, opcode, payload, compressed in self.parse_frame(data): + if compressed and not self._decompressobj: + self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS) + if opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if (close_code < 3000 and + close_code not in ALLOWED_CLOSE_CODES): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close code: {}'.format(close_code)) + try: + close_message = payload[2:].decode('utf-8') + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, + 'Invalid UTF-8 
text message') from exc + msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close frame: {} {} {!r}'.format( + fin, opcode, payload)) + else: + msg = WSMessage(WSMsgType.CLOSE, 0, '') + + self.queue.feed_data(msg, 0) + + elif opcode == WSMsgType.PING: + self.queue.feed_data( + WSMessage(WSMsgType.PING, payload, ''), len(payload)) + + elif opcode == WSMsgType.PONG: + self.queue.feed_data( + WSMessage(WSMsgType.PONG, payload, ''), len(payload)) + + elif opcode not in ( + WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Unexpected opcode={!r}".format(opcode)) + else: + # load text/binary + if not fin: + # got partial frame payload + if opcode != WSMsgType.CONTINUATION: + self._opcode = opcode + self._partial.extend(payload) + if (self._max_msg_size and + len(self._partial) >= self._max_msg_size): + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size)) + else: + # previous frame was non finished + # we should get continuation opcode + if self._partial: + if opcode != WSMsgType.CONTINUATION: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'The opcode in non-fin frame is expected ' + 'to be zero, got {!r}'.format(opcode)) + + if opcode == WSMsgType.CONTINUATION: + assert self._opcode is not None + opcode = self._opcode + self._opcode = None + + self._partial.extend(payload) + if (self._max_msg_size and + len(self._partial) >= self._max_msg_size): + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size)) + + # Decompress process must to be done after all packets + # received. + if compressed: + self._partial.extend(_WS_DEFLATE_TRAILING) + payload_merged = self._decompressobj.decompress( + self._partial, self._max_msg_size) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size exceeds limit {}". 
+ format(self._max_msg_size + left, + self._max_msg_size)) + else: + payload_merged = bytes(self._partial) + + self._partial.clear() + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode('utf-8') + self.queue.feed_data( + WSMessage(WSMsgType.TEXT, text, ''), len(text)) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, + 'Invalid UTF-8 text message') from exc + else: + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ''), + len(payload_merged)) + + return False, b'' + + def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int], + bytearray, + Optional[bool]]]: + """Return the next frame from the socket.""" + frames = [] + if self._tail: + buf, self._tail = self._tail + buf, b'' + + start_pos = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == WSParserState.READ_HEADER: + if buf_length - start_pos >= 2: + data = buf[start_pos:start_pos+2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xf + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received frame with non-zero reserved bits') + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received fragmented control frame') + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7f + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Control frame payload cannot be ' + 'larger than 125 bytes') + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received frame with non-zero reserved bits') + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + else: + break + + # read payload length + if self._state == WSParserState.READ_PAYLOAD_LENGTH: + length = self._payload_length_flag + if length == 126: + if buf_length - start_pos >= 2: + data = buf[start_pos:start_pos+2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD) + else: + break + elif length > 126: + if buf_length - start_pos >= 8: + data = buf[start_pos:start_pos+8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD) + else: + break + else: + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD) + + # read payload 
mask + if self._state == WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos >= 4: + self._frame_mask = buf[start_pos:start_pos+4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + else: + break + + if self._state == WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload.extend(buf[start_pos:]) + start_pos = buf_length + else: + self._payload_length = 0 + payload.extend(buf[start_pos:start_pos+length]) + start_pos = start_pos + length + + if self._payload_length == 0: + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) + + frames.append(( + self._frame_fin, + self._frame_opcode, + payload, + self._compressed)) + + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + else: + break + + self._tail = buf[start_pos:] + + return frames + + +class WebSocketWriter: + + def __init__(self, protocol: BaseProtocol, transport: asyncio.Transport, *, + use_mask: bool=False, limit: int=DEFAULT_LIMIT, + random: Any=random.Random(), + compress: int=0, notakeover: bool=False) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.randrange = random.randrange + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj = None # type: Any # actually compressobj + + async def _send_frame(self, message: bytes, opcode: int, + compress: Optional[int]=None) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing: + ws_logger.warning('websocket connection is closing.') + + rsv = 0 + + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? 
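+ # NOTE (editorial annotation, not part of upstream aiohttp): when
+ # permessage-deflate applies, the branch below compresses the payload,
+ # strips the trailing 0x00 0x00 0xff 0xff flush marker
+ # (_WS_DEFLATE_TRAILING) and sets RSV1 (0x40) in the first header byte
+ # so the peer knows this message must be inflated on receipt.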
+ # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = zlib.compressobj(wbits=-compress) + else: # self.compress + if not self._compressobj: + self._compressobj = zlib.compressobj(wbits=-self.compress) + compressobj = self._compressobj + + message = compressobj.compress(message) + message = message + compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH) + if message.endswith(_WS_DEFLATE_TRAILING): + message = message[:-4] + rsv = rsv | 0x40 + + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask = self.randrange(0, 0xffffffff) + mask = mask.to_bytes(4, 'big') + message = bytearray(message) + _websocket_mask(mask, message) + self.transport.write(header + mask + message) + self._output_size += len(header) + len(mask) + len(message) + else: + if len(message) > MSG_SIZE: + self.transport.write(header) + self.transport.write(message) + else: + self.transport.write(header + message) + + self._output_size += len(header) + len(message) + + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + async def pong(self, message: bytes=b'') -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode('utf-8') + await self._send_frame(message, WSMsgType.PONG) + + async def ping(self, message: bytes=b'') -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode('utf-8') + await self._send_frame(message, WSMsgType.PING) + + async def send(self, message: Union[str, bytes], + binary: bool=False, + compress: Optional[int]=None) -> None: + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode('utf-8') + if binary: + await self._send_frame(message, WSMsgType.BINARY, compress) + else: + await self._send_frame(message, WSMsgType.TEXT, compress) + + async def close(self, code: int=1000, message: bytes=b'') -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode('utf-8') + try: + await self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE) + finally: + self._closing = True diff --git a/venv/lib/python3.7/site-packages/aiohttp/http_writer.py b/venv/lib/python3.7/site-packages/aiohttp/http_writer.py new file mode 100644 index 0000000..7e27fbf --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/http_writer.py @@ -0,0 +1,172 @@ +"""Http related parsers and protocol.""" + +import asyncio +import collections +import zlib +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from multidict import CIMultiDict # noqa + +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS + +__all__ = ('StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11') + +HttpVersion = collections.namedtuple('HttpVersion', ['major', 'minor']) +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + + +_T_OnChunkSent = 
Optional[Callable[[bytes], Awaitable[None]]] + + +class StreamWriter(AbstractStreamWriter): + + def __init__(self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + on_chunk_sent: _T_OnChunkSent = None) -> None: + self._protocol = protocol + self._transport = protocol.transport + + self.loop = loop + self.length = None + self.chunked = False + self.buffer_size = 0 + self.output_size = 0 + + self._eof = False + self._compress = None # type: Any + self._drain_waiter = None + + self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent + + @property + def transport(self) -> Optional[asyncio.Transport]: + return self._transport + + @property + def protocol(self) -> BaseProtocol: + return self._protocol + + def enable_chunking(self) -> None: + self.chunked = True + + def enable_compression(self, encoding: str='deflate') -> None: + zlib_mode = (16 + zlib.MAX_WBITS + if encoding == 'gzip' else -zlib.MAX_WBITS) + self._compress = zlib.compressobj(wbits=zlib_mode) + + def _write(self, chunk: bytes) -> None: + size = len(chunk) + self.buffer_size += size + self.output_size += size + + if self._transport is None or self._transport.is_closing(): + raise ConnectionResetError('Cannot write to closing transport') + self._transport.write(chunk) + + async def write(self, chunk: bytes, + *, drain: bool=True, LIMIT: int=0x10000) -> None: + """Writes chunk of data to a stream. + + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. + """ + if self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress is not None: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self.length is not None: + chunk_len = len(chunk) + if self.length >= chunk_len: + self.length = self.length - chunk_len + else: + chunk = chunk[:self.length] + self.length = 0 + if not chunk: + return + + if chunk: + if self.chunked: + chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii') + chunk = chunk_len_pre + chunk + b'\r\n' + + self._write(chunk) + + if self.buffer_size > LIMIT and drain: + self.buffer_size = 0 + await self.drain() + + async def write_headers(self, status_line: str, + headers: 'CIMultiDict[str]') -> None: + """Write request/response status and headers.""" + # status + headers + buf = _serialize_headers(status_line, headers) + self._write(buf) + + async def write_eof(self, chunk: bytes=b'') -> None: + if self._eof: + return + + if chunk and self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress: + if chunk: + chunk = self._compress.compress(chunk) + + chunk = chunk + self._compress.flush() + if chunk and self.chunked: + chunk_len = ('%x\r\n' % len(chunk)).encode('ascii') + chunk = chunk_len + chunk + b'\r\n0\r\n\r\n' + else: + if self.chunked: + if chunk: + chunk_len = ('%x\r\n' % len(chunk)).encode('ascii') + chunk = chunk_len + chunk + b'\r\n0\r\n\r\n' + else: + chunk = b'0\r\n\r\n' + + if chunk: + self._write(chunk) + + await self.drain() + + self._eof = True + self._transport = None + + async def drain(self) -> None: + """Flush the write buffer. 
+ + The intended use is to write + + await w.write(data) + await w.drain() + """ + if self._protocol.transport is not None: + await self._protocol._drain_helper() + + +def _py_serialize_headers(status_line: str, + headers: 'CIMultiDict[str]') -> bytes: + line = status_line + '\r\n' + ''.join( + [k + ': ' + v + '\r\n' for k, v in headers.items()]) + return line.encode('utf-8') + b'\r\n' + + +_serialize_headers = _py_serialize_headers + +try: + import aiohttp._http_writer as _http_writer # type: ignore + _c_serialize_headers = _http_writer._serialize_headers + if not NO_EXTENSIONS: + _serialize_headers = _c_serialize_headers +except ImportError: + pass diff --git a/venv/lib/python3.7/site-packages/aiohttp/locks.py b/venv/lib/python3.7/site-packages/aiohttp/locks.py new file mode 100644 index 0000000..ed41f97 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/locks.py @@ -0,0 +1,44 @@ +import asyncio +import collections +from typing import Any, Optional + +try: + from typing import Deque +except ImportError: + from typing_extensions import Deque # noqa + + +class EventResultOrError: + """ + This class wrappers the Event asyncio lock allowing either awake the + locked Tasks without any error or raising an exception. + + thanks to @vorpalsmith for the simple design. + """ + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._exc = None # type: Optional[BaseException] + self._event = asyncio.Event(loop=loop) + self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]] + + def set(self, exc: Optional[BaseException]=None) -> None: + self._exc = exc + self._event.set() + + async def wait(self) -> Any: + waiter = self._loop.create_task(self._event.wait()) + self._waiters.append(waiter) + try: + val = await waiter + finally: + self._waiters.remove(waiter) + + if self._exc is not None: + raise self._exc + + return val + + def cancel(self) -> None: + """ Cancel all waiters """ + for waiter in self._waiters: + waiter.cancel() diff --git a/venv/lib/python3.7/site-packages/aiohttp/log.py b/venv/lib/python3.7/site-packages/aiohttp/log.py new file mode 100644 index 0000000..cfda0e5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/log.py @@ -0,0 +1,8 @@ +import logging + +access_logger = logging.getLogger('aiohttp.access') +client_logger = logging.getLogger('aiohttp.client') +internal_logger = logging.getLogger('aiohttp.internal') +server_logger = logging.getLogger('aiohttp.server') +web_logger = logging.getLogger('aiohttp.web') +ws_logger = logging.getLogger('aiohttp.websocket') diff --git a/venv/lib/python3.7/site-packages/aiohttp/multipart.py b/venv/lib/python3.7/site-packages/aiohttp/multipart.py new file mode 100644 index 0000000..b16598e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/multipart.py @@ -0,0 +1,937 @@ +import base64 +import binascii +import json +import re +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping # noqa + +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + 
LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +__all__ = ('MultipartReader', 'MultipartWriter', 'BodyPartReader', + 'BadContentDispositionHeader', 'BadContentDispositionParam', + 'parse_content_disposition', 'content_disposition_filename') + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse # noqa + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str], + Dict[str, str]]: + + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith('*') + + def is_continuous_param(string: str) -> bool: + pos = string.find('*') + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith('*') else string[pos:] + return substring.isdigit() + + def unescape(text: str, *, + chars: str=''.join(map(re.escape, CHAR))) -> str: + return re.sub('\\\\([{}])'.format(chars), '\\1', text) + + if not header: + return None, {} + + disptype, *parts = header.split(';') + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params = {} # type: Dict[str, str] + while parts: + item = parts.pop(0) + + if '=' not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split('=', 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or 'utf-8' + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, 'strict') + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip('\\/')) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = '%s;%s' % (value, parts[0]) + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip('\\/')) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename(params: Mapping[str, str], + name: str='filename') -> Optional[str]: + name_suf = '%s*' % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + parts = [] + fnparams = sorted((key, value) + for key, value in params.items() + if key.startswith(name_suf)) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split('*', 1) + if tail.endswith('*'): + tail = tail[:-1] 
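+ # NOTE (editorial annotation, not part of upstream aiohttp): this loop
+ # reassembles RFC 2231 continuation parameters (filename*0, filename*1,
+ # ...) in numeric order and stops at the first gap; for instance
+ # {'filename*0': 'long', 'filename*1': 'name.txt'} yields 'longname.txt'.
+ # A plain header such as 'attachment; filename="report.pdf"' never
+ # reaches this branch, because parse_content_disposition() already
+ # produced a literal 'filename' key that is returned above.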
+ if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = ''.join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or 'utf-8' + return unquote(value, encoding, 'strict') + return value + + +class MultipartResponseWrapper: + """Wrapper around the MultipartBodyReader. + + It takes care about + underlying connection and close it when it needs in. + """ + + def __init__(self, resp: 'ClientResponse', stream: Any) -> None: + # TODO: add strong annotation to stream + self.resp = resp + self.stream = stream + + def __aiter__(self) -> 'MultipartResponseWrapper': + return self + + async def __anext__(self) -> Any: + part = await self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + def at_eof(self) -> bool: + """Returns True when all response data had been read.""" + return self.resp.content.at_eof() + + async def next(self) -> Any: + """Emits next multipart reader object.""" + item = await self.stream.next() + if self.stream.at_eof(): + await self.release() + return item + + async def release(self) -> None: + """Releases the connection gracefully, reading all the content + to the void.""" + await self.resp.release() + + +class BodyPartReader: + """Multipart reader for single body part.""" + + chunk_size = 8192 + + def __init__(self, boundary: bytes, + headers: Mapping[str, Optional[str]], + content: StreamReader) -> None: + self.headers = headers + self._boundary = boundary + self._content = content + self._at_eof = False + length = self.headers.get(CONTENT_LENGTH, None) + self._length = int(length) if length is not None else None + self._read_bytes = 0 + # TODO: typeing.Deque is not supported by Python 3.5 + self._unread = deque() # type: Any + self._prev_chunk = None # type: Optional[bytes] + self._content_eof = 0 + self._cache = {} # type: Dict[str, Any] + + def __aiter__(self) -> 'BodyPartReader': + return self + + async def __anext__(self) -> Any: + part = await self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + async def next(self) -> Any: + item = await self.read() + if not item: + return None + return item + + async def read(self, *, decode: bool=False) -> Any: + """Reads body part data. + + decode: Decodes data following by encoding + method from Content-Encoding header. If it missed + data remains untouched + """ + if self._at_eof: + return b'' + data = bytearray() + while not self._at_eof: + data.extend((await self.read_chunk(self.chunk_size))) + if decode: + return self.decode(data) + return data + + async def read_chunk(self, size: int=chunk_size) -> bytes: + """Reads body part content chunk of the specified size. + + size: chunk size + """ + if self._at_eof: + return b'' + if self._length: + chunk = await self._read_chunk_from_length(size) + else: + chunk = await self._read_chunk_from_stream(size) + + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True + if self._at_eof: + clrf = await self._content.readline() + assert b'\r\n' == clrf, \ + 'reader did not read all the data or it is malformed' + return chunk + + async def _read_chunk_from_length(self, size: int) -> bytes: + # Reads body part content chunk of the specified size. + # The body part must has Content-Length header with proper value. 
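+ # NOTE (editorial annotation, not part of upstream aiohttp): with a
+ # known Content-Length the reader simply clamps each read to the bytes
+ # remaining in this part, min(size, self._length - self._read_bytes),
+ # so it cannot run past the part boundary into the next part.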
+ assert self._length is not None, \ + 'Content-Length required for chunked read' + chunk_size = min(size, self._length - self._read_bytes) + chunk = await self._content.read(chunk_size) + return chunk + + async def _read_chunk_from_stream(self, size: int) -> bytes: + # Reads content chunk of body part with unknown length. + # The Content-Length header for body part is not necessary. + assert size >= len(self._boundary) + 2, \ + 'Chunk size must be greater or equal than boundary length + 2' + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = await self._content.read(size) + + chunk = await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + assert self._prev_chunk is not None + window = self._prev_chunk + chunk + sub = b'\r\n' + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + category=DeprecationWarning) + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk):idx] + if not chunk: + self._at_eof = True + result = self._prev_chunk + self._prev_chunk = chunk + return result + + async def readline(self) -> bytes: + """Reads body part by line by line.""" + if self._at_eof: + return b'' + + if self._unread: + line = self._unread.popleft() + else: + line = await self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b'\r\n') + boundary = self._boundary + last_boundary = self._boundary + b'--' + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b'' + else: + next_line = await self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + async def release(self) -> None: + """Like read(), but reads all the data to the void.""" + if self._at_eof: + return + while not self._at_eof: + await self.read_chunk(self.chunk_size) + + async def text(self, *, encoding: Optional[str]=None) -> str: + """Like read(), but assumes that body part contains text data.""" + data = await self.read(decode=True) + # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA + # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA + encoding = encoding or self.get_charset(default='utf-8') + return data.decode(encoding) + + async def json(self, *, encoding: Optional[str]=None) -> Any: + """Like read(), but assumes that body parts contains JSON data.""" + data = await self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default='utf-8') + return json.loads(data.decode(encoding)) + + async def form(self, *, + encoding: Optional[str]=None) -> List[Tuple[str, str]]: + """Like read(), but assumes that body parts contains form + urlencoded data. 
+ """ + data = await self.read(decode=True) + if not data: + return [] + if encoding is not None: + real_encoding = encoding + else: + real_encoding = self.get_charset(default='utf-8') + return parse_qsl(data.rstrip().decode(real_encoding), + keep_blank_values=True, + encoding=real_encoding) + + def at_eof(self) -> bool: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + + def decode(self, data: bytes) -> bytes: + """Decodes data according the specified Content-Encoding + or Content-Transfer-Encoding headers value. + """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + if CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data: bytes) -> bytes: + encoding = cast(str, self.headers[CONTENT_ENCODING]).lower() + + if encoding == 'deflate': + return zlib.decompress(data, -zlib.MAX_WBITS) + elif encoding == 'gzip': + return zlib.decompress(data, 16 + zlib.MAX_WBITS) + elif encoding == 'identity': + return data + else: + raise RuntimeError('unknown content encoding: {}'.format(encoding)) + + def _decode_content_transfer(self, data: bytes) -> bytes: + encoding = cast(str, self.headers[CONTENT_TRANSFER_ENCODING]).lower() + + if encoding == 'base64': + return base64.b64decode(data) + elif encoding == 'quoted-printable': + return binascii.a2b_qp(data) + elif encoding in ('binary', '8bit', '7bit'): + return data + else: + raise RuntimeError('unknown content transfer encoding: {}' + ''.format(encoding)) + + def get_charset(self, default: str) -> str: + """Returns charset parameter from Content-Type header or default.""" + ctype = self.headers.get(CONTENT_TYPE, '') + mimetype = parse_mimetype(ctype) + return mimetype.parameters.get('charset', default) + + @reify + def name(self) -> Optional[str]: + """Returns name specified in Content-Disposition header or None + if missed or header is malformed. + """ + + _, params = parse_content_disposition( + self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, 'name') + + @reify + def filename(self) -> Optional[str]: + """Returns filename specified in Content-Disposition header or None + if missed or header is malformed. + """ + _, params = parse_content_disposition( + self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, 'filename') + + +@payload_type(BodyPartReader, order=Order.try_first) +class BodyPartReaderPayload(Payload): + + def __init__(self, value: BodyPartReader, + *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + + params = {} # type: Dict[str, str] + if value.name is not None: + params['name'] = value.name + if value.filename is not None: + params['filename'] = value.filename + + if params: + self.set_content_disposition('attachment', True, **params) + + async def write(self, writer: Any) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: + await writer.write(field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +class MultipartReader: + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. + #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. 
+ part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], + content: StreamReader) -> None: + self.headers = headers + self._boundary = ('--' + self._get_boundary()).encode() + self._content = content + self._last_part = None + self._at_eof = False + self._at_bof = True + self._unread = [] # type: List[bytes] + + def __aiter__(self) -> 'MultipartReader': + return self + + async def __anext__(self) -> Any: + part = await self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + @classmethod + def from_response(cls, response: 'ClientResponse') -> Any: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls(response, cls(response.headers, + response.content)) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached or + False otherwise. + """ + return self._at_eof + + async def next(self) -> Any: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return + self._last_part = await self.fetch_next_part() + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part(self) -> Any: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any: + """Dispatches the response by the `Content-Type` header, returning + suitable reader instance. 
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, '') + mimetype = parse_mimetype(ctype) + + if mimetype.type == 'multipart': + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls(self._boundary, headers, self._content) + + def _get_boundary(self) -> str: + mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) + + assert mimetype.type == 'multipart', ( + 'multipart/* content type expected' + ) + + if 'boundary' not in mimetype.parameters: + raise ValueError('boundary missed for Content-Type: %s' + % self.headers[CONTENT_TYPE]) + + boundary = mimetype.parameters['boundary'] + if len(boundary) > 70: + raise ValueError('boundary %r is too long (70 chars max)' + % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b'': + raise ValueError("Could not find starting boundary %r" + % (self._boundary)) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b'--': + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b'--': + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b'--': + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError('Invalid boundary %r, expected %r' + % (chunk, self._boundary)) + + async def _read_headers(self) -> 'CIMultiDictProxy[str]': + lines = [b''] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + def __init__(self, subtype: str='mixed', + boundary: Optional[str]=None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
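+ # NOTE (editorial annotation, not part of upstream aiohttp): a
+ # non-ASCII boundary fails the encode('ascii') below and surfaces as
+ # ValueError, while an ASCII boundary that is not a plain RFC 7230
+ # token is later wrapped in double quotes by the _boundary_value
+ # property when the multipart Content-Type header is built.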
+ + try: + self._boundary = boundary.encode('ascii') + except UnicodeEncodeError: + raise ValueError('boundary should contain ASCII only chars') \ + from None + ctype = ('multipart/{}; boundary={}' + .format(subtype, self._boundary_value)) + + super().__init__(None, content_type=ctype) + + self._parts = [] # type: List[_Part] # noqa + + def __enter__(self) -> 'MultipartWriter': + return self + + def __exit__(self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode sting. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode('ascii') # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b'\\', b'\\\\') + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode('ascii') + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode('ascii') + + def append( + self, + obj: Any, + headers: Optional['MultiMapping[str]']=None + ) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError('Cannot create payload from %r' % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + # compression + encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa + if encoding and encoding not in ('deflate', 'gzip', 'identity'): + raise RuntimeError('unknown content encoding: {}'.format(encoding)) + if encoding == 'identity': + encoding = None + + # te encoding + te_encoding = payload.headers.get( + CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa + if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): + raise RuntimeError('unknown content transfer encoding: {}' + ''.format(te_encoding)) + if te_encoding == 'binary': + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore + return payload + + def append_json( + self, + obj: Any, + headers: Optional['MultiMapping[str]']=None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: 
+ headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], + Mapping[str, str]], + headers: Optional['MultiMapping[str]']=None + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload(data, headers=headers, + content_type='application/x-www-form-urlencoded')) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + if not self._parts: + return 0 + + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' + part.size + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + async def write(self, writer: Any, + close_boundary: bool=True) -> None: + """Write body.""" + if not self._parts: + return + + for part, encoding, te_encoding in self._parts: + await writer.write(b'--' + self._boundary + b'\r\n') + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b'\r\n') + + if close_boundary: + await writer.write(b'--' + self._boundary + b'--\r\n') + + +class MultipartPayloadWriter: + + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding = None # type: Optional[str] + self._compress = None # type: Any + self._encoding_buffer = None # type: Optional[bytearray] + + def enable_encoding(self, encoding: str) -> None: + if encoding == 'base64': + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == 'quoted-printable': + self._encoding = 'quoted-printable' + + def enable_compression(self, encoding: str='deflate') -> None: + zlib_mode = (16 + zlib.MAX_WBITS + if encoding == 'gzip' else -zlib.MAX_WBITS) + self._compress = zlib.compressobj(wbits=zlib_mode) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == 'base64': + if self._encoding_buffer: + await self._writer.write(base64.b64encode( + self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self._encoding == 'base64': + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = ( + buf[:div * 3], buf[div * 3:]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == 'quoted-printable': + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/venv/lib/python3.7/site-packages/aiohttp/payload.py b/venv/lib/python3.7/site-packages/aiohttp/payload.py new file mode 100644 index 0000000..5b110ba --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/payload.py 
@@ -0,0 +1,456 @@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + ByteString, + Dict, + Iterable, + Optional, + Text, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + PY_36, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import DEFAULT_LIMIT, StreamReader +from .typedefs import JSONEncoder, _CIMultiDict + +__all__ = ('PAYLOAD_REGISTRY', 'get_payload', 'payload_type', 'Payload', + 'BytesPayload', 'StringPayload', + 'IOBasePayload', 'BytesIOPayload', 'BufferedReaderPayload', + 'TextIOPayload', 'StringIOPayload', 'JsonPayload', + 'AsyncIterablePayload') + +TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB + + +if TYPE_CHECKING: # pragma: no cover + from typing import List # noqa + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = 'normal' + try_first = 'try_first' + try_last = 'try_last' + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> 'Payload': + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload(factory: Type['Payload'], + type: Any, + *, + order: Order=Order.normal) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + + def __init__(self, type: Any, *, order: Order=Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type['Payload']) -> Type['Payload']: + register_payload(factory, self.type, order=self.order) + return factory + + +class PayloadRegistry: + """Payload registry. + + note: we need zope.interface for more efficient adapter search + """ + + def __init__(self) -> None: + self._first = [] # type: List[Tuple[Type[Payload], Any]] + self._normal = [] # type: List[Tuple[Type[Payload], Any]] + self._last = [] # type: List[Tuple[Type[Payload], Any]] + + def get(self, + data: Any, + *args: Any, + _CHAIN: Any=chain, + **kwargs: Any) -> 'Payload': + if isinstance(data, Payload): + return data + for factory, type in _CHAIN(self._first, self._normal, self._last): + if isinstance(data, type): + return factory(data, *args, **kwargs) + + raise LookupError() + + def register(self, + factory: Type['Payload'], + type: Any, + *, + order: Order=Order.normal) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError("Unsupported order {!r}".format(order)) + + +class Payload(ABC): + + _default_content_type = 'application/octet-stream' # type: str + _size = None # type: Optional[int] + + def __init__(self, + value: Any, + headers: Optional[ + Union[ + _CIMultiDict, + Dict[str, str], + Iterable[Tuple[str, str]] + ] + ] = None, + content_type: Optional[str]=sentinel, + filename: Optional[str]=None, + encoding: Optional[str]=None, + **kwargs: Any) -> None: + self._encoding = encoding + self._filename = filename + self._headers = CIMultiDict() # type: _CIMultiDict + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + content_type = mimetypes.guess_type(self._filename)[0] + if content_type is None: + content_type = 
self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + self._headers.update(headers or {}) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ''.join( + [k + ': ' + v + '\r\n' for k, v in self.headers.items()] + ).encode('utf-8') + b'\r\n' + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition(self, + disptype: str, + quote_fields: bool=True, + **params: Any) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( + disptype, quote_fields=quote_fields, **params) + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. + + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + + def __init__(self, + value: ByteString, + *args: Any, + **kwargs: Any) -> None: + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError("value argument must be byte-ish, not (!r)" + .format(type(value))) + + if 'content_type' not in kwargs: + kwargs['content_type'] = 'application/octet-stream' + + super().__init__(value, *args, **kwargs) + + self._size = len(value) + + if self._size > TOO_LARGE_BYTES_BODY: + if PY_36: + kwargs = {'source': self} + else: + kwargs = {} + warnings.warn("Sending a large body directly with raw bytes might" + " lock the event loop. 
You should probably pass an " + "io.BytesIO object instead", ResourceWarning, + **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + + def __init__(self, + value: Text, + *args: Any, + encoding: Optional[str]=None, + content_type: Optional[str]=None, + **kwargs: Any) -> None: + + if encoding is None: + if content_type is None: + real_encoding = 'utf-8' + content_type = 'text/plain; charset=utf-8' + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get('charset', 'utf-8') + else: + if content_type is None: + content_type = 'text/plain; charset=%s' % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + + def __init__(self, + value: IO[str], + *args: Any, + **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + + def __init__(self, + value: IO[Any], + disposition: str='attachment', + *args: Any, + **kwargs: Any) -> None: + if 'filename' not in kwargs: + kwargs['filename'] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition( + disposition, filename=self._filename + ) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor( + None, self._value.read, DEFAULT_LIMIT + ) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor( + None, self._value.read, DEFAULT_LIMIT + ) + finally: + await loop.run_in_executor(None, self._value.close) + + +class TextIOPayload(IOBasePayload): + + def __init__(self, + value: TextIO, + *args: Any, + encoding: Optional[str]=None, + content_type: Optional[str]=None, + **kwargs: Any) -> None: + + if encoding is None: + if content_type is None: + encoding = 'utf-8' + content_type = 'text/plain; charset=utf-8' + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get('charset', 'utf-8') + else: + if content_type is None: + content_type = 'text/plain; charset=%s' % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor( + None, self._value.read, DEFAULT_LIMIT + ) + while chunk: + await writer.write(chunk.encode(self._encoding)) + chunk = await loop.run_in_executor( + None, self._value.read, DEFAULT_LIMIT + ) + finally: + await loop.run_in_executor(None, self._value.close) + + +class BytesIOPayload(IOBasePayload): + + @property + def size(self) -> int: + position = self._value.tell() + end = self._value.seek(0, os.SEEK_END) + self._value.seek(position) + return end - position + + +class BufferedReaderPayload(IOBasePayload): + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + # data.fileno() is not supported, e.g. 
+ # io.BufferedReader(io.BytesIO(b'data')) + return None + + +class JsonPayload(BytesPayload): + + def __init__(self, + value: Any, + encoding: str='utf-8', + content_type: str='application/json', + dumps: JSONEncoder=json.dumps, + *args: Any, + **kwargs: Any) -> None: + + super().__init__( + dumps(value).encode(encoding), + content_type=content_type, encoding=encoding, *args, **kwargs) + + +if TYPE_CHECKING: # pragma: no cover + from typing import AsyncIterator, AsyncIterable + + _AsyncIterator = AsyncIterator[bytes] + _AsyncIterable = AsyncIterable[bytes] +else: + from collections.abc import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator + _AsyncIterable = AsyncIterable + + +class AsyncIterablePayload(Payload): + + _iter = None # type: Optional[_AsyncIterator] + + def __init__(self, + value: _AsyncIterable, + *args: Any, + **kwargs: Any) -> None: + if not isinstance(value, AsyncIterable): + raise TypeError("value argument must support " + "collections.abc.AsyncIterablebe interface, " + "got {!r}".format(type(value))) + + if 'content_type' not in kwargs: + kwargs['content_type'] = 'application/octet-stream' + + super().__init__(value, *args, **kwargs) + + self._iter = value.__aiter__() + + async def write(self, writer: AbstractStreamWriter) -> None: + if self._iter: + try: + # iter is not None check prevents rare cases + # when the case iterable is used twice + while True: + chunk = await self._iter.__anext__() + await writer.write(chunk) + except StopAsyncIteration: + self._iter = None + + +class StreamReaderPayload(AsyncIterablePayload): + + def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value.iter_any(), *args, **kwargs) + + +PAYLOAD_REGISTRY = PayloadRegistry() +PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) +PAYLOAD_REGISTRY.register(StringPayload, str) +PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) +PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) +PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) +PAYLOAD_REGISTRY.register( + BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) +PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) +PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) +# try_last for giving a chance to more specialized async interables like +# multidict.BodyPartReaderPayload override the default +PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, + order=Order.try_last) diff --git a/venv/lib/python3.7/site-packages/aiohttp/payload_streamer.py b/venv/lib/python3.7/site-packages/aiohttp/payload_streamer.py new file mode 100644 index 0000000..e76bf43 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/payload_streamer.py @@ -0,0 +1,74 @@ +""" Payload implemenation for coroutines as data provider. 
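
Aside: the PAYLOAD_REGISTRY registrations above are what get_payload() consults when request data is handed to the client; a small sketch of that dispatch (not part of the patch, values arbitrary):

    import io
    from aiohttp import payload

    print(type(payload.get_payload(b'raw bytes')))          # BytesPayload
    print(type(payload.get_payload('some text')))           # StringPayload
    print(type(payload.get_payload(io.BytesIO(b'blob'))))   # BytesIOPayload
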
+ +As a simple case, you can upload data from file:: + + @aiohttp.streamer + async def file_sender(writer, file_name=None): + with open(file_name, 'rb') as f: + chunk = f.read(2**16) + while chunk: + await writer.write(chunk) + + chunk = f.read(2**16) + +Then you can use `file_sender` like this: + + async with session.post('http://httpbin.org/post', + data=file_sender(file_name='huge_file')) as resp: + print(await resp.text()) + +..note:: Coroutine must accept `writer` as first argument + +""" + +import asyncio +import warnings +from typing import Any, Awaitable, Callable, Dict, Tuple + +from .abc import AbstractStreamWriter +from .payload import Payload, payload_type + +__all__ = ('streamer',) + + +class _stream_wrapper: + + def __init__(self, + coro: Callable[..., Awaitable[None]], + args: Tuple[Any, ...], + kwargs: Dict[str, Any]) -> None: + self.coro = asyncio.coroutine(coro) + self.args = args + self.kwargs = kwargs + + async def __call__(self, writer: AbstractStreamWriter) -> None: + await self.coro(writer, *self.args, **self.kwargs) + + +class streamer: + + def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: + warnings.warn("@streamer is deprecated, use async generators instead", + DeprecationWarning, + stacklevel=2) + self.coro = coro + + def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: + return _stream_wrapper(self.coro, args, kwargs) + + +@payload_type(_stream_wrapper) +class StreamWrapperPayload(Payload): + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) + + +@payload_type(streamer) +class StreamPayload(StreamWrapperPayload): + + def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: + super().__init__(value(), *args, **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) diff --git a/venv/lib/python3.7/site-packages/aiohttp/py.typed b/venv/lib/python3.7/site-packages/aiohttp/py.typed new file mode 100644 index 0000000..20a7439 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +Marker \ No newline at end of file diff --git a/venv/lib/python3.7/site-packages/aiohttp/pytest_plugin.py b/venv/lib/python3.7/site-packages/aiohttp/pytest_plugin.py new file mode 100644 index 0000000..4658da6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1,337 @@ +import asyncio +import contextlib +import warnings +from collections.abc import Callable + +import pytest + +from aiohttp.helpers import isasyncgenfunction +from aiohttp.web import Application + +from .test_utils import ( + BaseTestServer, + RawTestServer, + TestClient, + TestServer, + loop_context, + setup_test_loop, + teardown_test_loop, +) +from .test_utils import unused_port as _unused_port + +try: + import uvloop +except ImportError: # pragma: no cover + uvloop = None + +try: + import tokio +except ImportError: # pragma: no cover + tokio = None + + +def pytest_addoption(parser): # type: ignore + parser.addoption( + '--aiohttp-fast', action='store_true', default=False, + help='run tests faster by disabling extra checks') + parser.addoption( + '--aiohttp-loop', action='store', default='pyloop', + help='run tests with specific loop: pyloop, uvloop, tokio or all') + parser.addoption( + '--aiohttp-enable-loop-debug', action='store_true', default=False, + help='enable event loop debug mode') + + +def pytest_fixture_setup(fixturedef): # type: ignore + """ + Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. 
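
Aside: since @streamer above is deprecated in favour of async generators (and AsyncIterablePayload accepts any async iterable), the file_sender example from that docstring can be rewritten as a plain async generator; the URL and 'huge_file' name are the docstring's own placeholders:

    import aiohttp

    async def file_sender(file_name):
        with open(file_name, 'rb') as f:
            chunk = f.read(2 ** 16)
            while chunk:
                yield chunk
                chunk = f.read(2 ** 16)

    async def upload(session: aiohttp.ClientSession) -> None:
        async with session.post('http://httpbin.org/post',
                                data=file_sender('huge_file')) as resp:
            print(await resp.text())
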
+ """ + func = fixturedef.func + + if isasyncgenfunction(func): + # async generator fixture + is_async_gen = True + elif asyncio.iscoroutinefunction(func): + # regular async fixture + is_async_gen = False + else: + # not an async fixture, nothing to do + return + + strip_request = False + if 'request' not in fixturedef.argnames: + fixturedef.argnames += ('request',) + strip_request = True + + def wrapper(*args, **kwargs): # type: ignore + request = kwargs['request'] + if strip_request: + del kwargs['request'] + + # if neither the fixture nor the test use the 'loop' fixture, + # 'getfixturevalue' will fail because the test is not parameterized + # (this can be removed someday if 'loop' is no longer parameterized) + if 'loop' not in request.fixturenames: + raise Exception( + "Asynchronous fixtures must depend on the 'loop' fixture or " + "be used in tests depending from it." + ) + + _loop = request.getfixturevalue('loop') + + if is_async_gen: + # for async generators, we need to advance the generator once, + # then advance it again in a finalizer + gen = func(*args, **kwargs) + + def finalizer(): # type: ignore + try: + return _loop.run_until_complete(gen.__anext__()) + except StopAsyncIteration: # NOQA + pass + + request.addfinalizer(finalizer) + return _loop.run_until_complete(gen.__anext__()) + else: + return _loop.run_until_complete(func(*args, **kwargs)) + + fixturedef.func = wrapper + + +@pytest.fixture +def fast(request): # type: ignore + """--fast config option""" + return request.config.getoption('--aiohttp-fast') + + +@pytest.fixture +def loop_debug(request): # type: ignore + """--enable-loop-debug config option""" + return request.config.getoption('--aiohttp-enable-loop-debug') + + +@contextlib.contextmanager +def _runtime_warning_context(): # type: ignore + """ + Context manager which checks for RuntimeWarnings, specifically to + avoid "coroutine 'X' was never awaited" warnings being missed. + + If RuntimeWarnings occur in the context a RuntimeError is raised. + """ + with warnings.catch_warnings(record=True) as _warnings: + yield + rw = ['{w.filename}:{w.lineno}:{w.message}'.format(w=w) + for w in _warnings # type: ignore + if w.category == RuntimeWarning] + if rw: + raise RuntimeError('{} Runtime Warning{},\n{}'.format( + len(rw), + '' if len(rw) == 1 else 's', + '\n'.join(rw) + )) + + +@contextlib.contextmanager +def _passthrough_loop_context(loop, fast=False): # type: ignore + """ + setups and tears down a loop unless one is passed in via the loop + argument when it's passed straight through. + """ + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop, fast=fast) + + +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore + """ + Fix pytest collecting for coroutines. + """ + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): # type: ignore + """ + Run coroutines in an event loop instead of a normal function call. 
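
Aside: in user code, the wrapper above translates into the rule that an async fixture must (directly or indirectly) request the plugin's 'loop' fixture; a hypothetical conftest.py sketch, with FakeDB standing in for real setup:

    import asyncio
    import pytest

    class FakeDB:
        async def close(self) -> None:
            pass

    @pytest.fixture
    async def database(loop):            # 'loop' dependency satisfies the check above
        db = FakeDB()
        await asyncio.sleep(0)           # stand-in for real asynchronous setup
        yield db                         # async-generator fixture, finalized via __anext__
        await db.close()

    async def test_database(database):   # collected as a coroutine test by this plugin
        assert isinstance(database, FakeDB)
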
+ """ + fast = pyfuncitem.config.getoption("--aiohttp-fast") + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get('loop', None) + with _runtime_warning_context(): + with _passthrough_loop_context(existing_loop, fast=fast) as _loop: + testargs = {arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames} + _loop.run_until_complete(pyfuncitem.obj(**testargs)) + + return True + + +def pytest_generate_tests(metafunc): # type: ignore + if 'loop_factory' not in metafunc.fixturenames: + return + + loops = metafunc.config.option.aiohttp_loop + avail_factories = {'pyloop': asyncio.DefaultEventLoopPolicy} + + if uvloop is not None: # pragma: no cover + avail_factories['uvloop'] = uvloop.EventLoopPolicy + + if tokio is not None: # pragma: no cover + avail_factories['tokio'] = tokio.EventLoopPolicy + + if loops == 'all': + loops = 'pyloop,uvloop?,tokio?' + + factories = {} # type: ignore + for name in loops.split(','): + required = not name.endswith('?') + name = name.strip(' ?') + if name not in avail_factories: # pragma: no cover + if required: + raise ValueError( + "Unknown loop '%s', available loops: %s" % ( + name, list(factories.keys()))) + else: + continue + factories[name] = avail_factories[name] + metafunc.parametrize("loop_factory", + list(factories.values()), + ids=list(factories.keys())) + + +@pytest.fixture +def loop(loop_factory, fast, loop_debug): # type: ignore + """Return an instance of the event loop.""" + policy = loop_factory() + asyncio.set_event_loop_policy(policy) + with loop_context(fast=fast) as _loop: + if loop_debug: + _loop.set_debug(True) # pragma: no cover + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def unused_port(aiohttp_unused_port): # type: ignore # pragma: no cover + warnings.warn("Deprecated, use aiohttp_unused_port fixture instead", + DeprecationWarning) + return aiohttp_unused_port + + +@pytest.fixture +def aiohttp_unused_port(): # type: ignore + """Return a port that is unused on the current host.""" + return _unused_port + + +@pytest.fixture +def aiohttp_server(loop): # type: ignore + """Factory to create a TestServer instance, given an app. + + aiohttp_server(app, **kwargs) + """ + servers = [] + + async def go(app, *, port=None, **kwargs): # type: ignore + server = TestServer(app, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize(): # type: ignore + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_server(aiohttp_server): # type: ignore # pragma: no cover + warnings.warn("Deprecated, use aiohttp_server fixture instead", + DeprecationWarning) + return aiohttp_server + + +@pytest.fixture +def aiohttp_raw_server(loop): # type: ignore + """Factory to create a RawTestServer instance, given a web handler. 
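
Aside: a test built on the aiohttp_server factory above might look like the following sketch; handler, route and assertions are illustrative only:

    from aiohttp import ClientSession, web

    async def hello(request):
        return web.Response(text='Hello, world')

    async def test_hello(aiohttp_server):
        app = web.Application()
        app.router.add_get('/', hello)
        server = await aiohttp_server(app)
        async with ClientSession() as session:
            async with session.get(server.make_url('/')) as resp:
                assert resp.status == 200
                assert await resp.text() == 'Hello, world'
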
+ + aiohttp_raw_server(handler, **kwargs) + """ + servers = [] + + async def go(handler, *, port=None, **kwargs): # type: ignore + server = RawTestServer(handler, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize(): # type: ignore + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def raw_test_server(aiohttp_raw_server): # type: ignore # pragma: no cover + warnings.warn("Deprecated, use aiohttp_raw_server fixture instead", + DeprecationWarning) + return aiohttp_raw_server + + +@pytest.fixture +def aiohttp_client(loop): # type: ignore + """Factory to create a TestClient instance. + + aiohttp_client(app, **kwargs) + aiohttp_client(server, **kwargs) + aiohttp_client(raw_server, **kwargs) + """ + clients = [] + + async def go(__param, *args, server_kwargs=None, **kwargs): # type: ignore + + if (isinstance(__param, Callable) and # type: ignore + not isinstance(__param, (Application, BaseTestServer))): + __param = __param(loop, *args, **kwargs) + kwargs = {} + else: + assert not args, "args should be empty" + + if isinstance(__param, Application): + server_kwargs = server_kwargs or {} + server = TestServer(__param, loop=loop, **server_kwargs) + client = TestClient(server, loop=loop, **kwargs) + elif isinstance(__param, BaseTestServer): + client = TestClient(__param, loop=loop, **kwargs) + else: + raise ValueError("Unknown argument type: %r" % type(__param)) + + await client.start_server() + clients.append(client) + return client + + yield go + + async def finalize(): # type: ignore + while clients: + await clients.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_client(aiohttp_client): # type: ignore # pragma: no cover + warnings.warn("Deprecated, use aiohttp_client fixture instead", + DeprecationWarning) + return aiohttp_client diff --git a/venv/lib/python3.7/site-packages/aiohttp/resolver.py b/venv/lib/python3.7/site-packages/aiohttp/resolver.py new file mode 100644 index 0000000..e0b6e13 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/resolver.py @@ -0,0 +1,112 @@ +import asyncio +import socket +from typing import Any, Dict, List, Optional + +from .abc import AbstractResolver +from .helpers import get_running_loop + +__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver') + +try: + import aiodns + # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') +except ImportError: # pragma: no cover + aiodns = None + +aiodns_default = False + + +class ThreadedResolver(AbstractResolver): + """Use Executor for synchronous getaddrinfo() calls, which defaults to + concurrent.futures.ThreadPoolExecutor. 
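
Aside: these resolvers are meant to be plugged into a connector; a sketch (not part of the patch) that prefers AsyncResolver, which needs the optional aiodns dependency as shown below, and falls back to the thread-pool implementation:

    import aiohttp

    async def fetch(url: str) -> str:
        try:
            resolver = aiohttp.AsyncResolver()
        except RuntimeError:                  # raised when aiodns is missing
            resolver = aiohttp.ThreadedResolver()
        connector = aiohttp.TCPConnector(resolver=resolver)
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url) as resp:
                return await resp.text()
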
+ """ + + def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + self._loop = get_running_loop(loop) + + async def resolve(self, host: str, port: int=0, + family: int=socket.AF_INET) -> List[Dict[str, Any]]: + infos = await self._loop.getaddrinfo( + host, port, type=socket.SOCK_STREAM, family=family) + + hosts = [] + for family, _, proto, _, address in infos: + hosts.append( + {'hostname': host, + 'host': address[0], 'port': address[1], + 'family': family, 'proto': proto, + 'flags': socket.AI_NUMERICHOST}) + + return hosts + + async def close(self) -> None: + pass + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups""" + + def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None, + *args: Any, **kwargs: Any) -> None: + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._loop = get_running_loop(loop) + self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + + if not hasattr(self._resolver, 'gethostbyname'): + # aiodns 1.1 is not available, fallback to DNSResolver.query + self.resolve = self._resolve_with_query # type: ignore + + async def resolve(self, host: str, port: int=0, + family: int=socket.AF_INET) -> List[Dict[str, Any]]: + try: + resp = await self._resolver.gethostbyname(host, family) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + hosts = [] + for address in resp.addresses: + hosts.append( + {'hostname': host, + 'host': address, 'port': port, + 'family': family, 'proto': 0, + 'flags': socket.AI_NUMERICHOST}) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def _resolve_with_query( + self, host: str, port: int=0, + family: int=socket.AF_INET) -> List[Dict[str, Any]]: + if family == socket.AF_INET6: + qtype = 'AAAA' + else: + qtype = 'A' + + try: + resp = await self._resolver.query(host, qtype) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + + hosts = [] + for rr in resp: + hosts.append( + {'hostname': host, + 'host': rr.host, 'port': port, + 'family': family, 'proto': 0, + 'flags': socket.AI_NUMERICHOST}) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def close(self) -> None: + return self._resolver.cancel() + + +DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/venv/lib/python3.7/site-packages/aiohttp/signals.py b/venv/lib/python3.7/site-packages/aiohttp/signals.py new file mode 100644 index 0000000..dda0dab --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/signals.py @@ -0,0 +1,34 @@ +from aiohttp.frozenlist import FrozenList + +__all__ = ('Signal',) + + +class Signal(FrozenList): + """Coroutine-based signal implementation. + + To connect a callback to a signal, use any list method. + + Signals are fired using the send() coroutine, which takes named + arguments. + """ + + __slots__ = ('_owner',) + + def __init__(self, owner): + super().__init__() + self._owner = owner + + def __repr__(self): + return ''.format(self._owner, + self.frozen, + list(self)) + + async def send(self, *args, **kwargs): + """ + Sends data to all registered receivers. 
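
Aside: the Signal above is used like a frozen list of coroutines: receivers are appended, the list is frozen, then send() awaits each in order. A standalone sketch (owner object and payload are arbitrary):

    from aiohttp.signals import Signal

    async def demo() -> None:
        sig = Signal(object())

        async def receiver(value):
            print('signal received:', value)

        sig.append(receiver)     # any list method registers a callback
        sig.freeze()             # send() refuses to fire while the list is mutable
        await sig.send('ping')
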
+ """ + if not self.frozen: + raise RuntimeError("Cannot send non-frozen signal.") + + for receiver in self: + await receiver(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.7/site-packages/aiohttp/signals.pyi b/venv/lib/python3.7/site-packages/aiohttp/signals.pyi new file mode 100644 index 0000000..d9778e7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/signals.pyi @@ -0,0 +1,18 @@ +from typing import Any, Generic, TypeVar + +from aiohttp.frozenlist import FrozenList + + +__all__ = ('Signal',) + + +_T = TypeVar('_T') + + +class Signal(FrozenList[_T], Generic[_T]): + + def __init__(self, owner: Any) -> None: ... + + def __repr__(self) -> str: ... + + async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/venv/lib/python3.7/site-packages/aiohttp/streams.py b/venv/lib/python3.7/site-packages/aiohttp/streams.py new file mode 100644 index 0000000..412e200 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/streams.py @@ -0,0 +1,631 @@ +import asyncio +import collections +import warnings +from typing import List # noqa +from typing import Awaitable, Callable, Generic, Optional, Tuple, TypeVar + +from .base_protocol import BaseProtocol +from .helpers import BaseTimerContext, set_exception, set_result +from .log import internal_logger + +try: # pragma: no cover + from typing import Deque # noqa +except ImportError: + from typing_extensions import Deque # noqa + + +__all__ = ( + 'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue', + 'FlowControlDataQueue') + +DEFAULT_LIMIT = 2 ** 16 + +_T = TypeVar('_T') + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> 'AsyncStreamIterator[_T]': + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration # NOQA + if rv == b'': + raise StopAsyncIteration # NOQA + return rv + + +class ChunkTupleAsyncStreamIterator: + + def __init__(self, stream: 'StreamReader') -> None: + self._stream = stream + + def __aiter__(self) -> 'ChunkTupleAsyncStreamIterator': + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b'', False): + raise StopAsyncIteration # NOQA + return rv + + +class AsyncStreamReaderMixin: + + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore + + def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(lambda: self.read(n)) # type: ignore + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields all the available + data as soon as it is received + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(self.readany) # type: ignore + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Returns an asynchronous iterator that yields chunks of data + as they are received by the server. The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + + Python-3.5 available for Python 3.5+ only + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. 
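
Aside: on the client side these mixin methods are what make resp.content (a StreamReader, defined next) iterable; for example, streaming a download in fixed-size chunks (URL, destination and chunk size are arbitrary):

    import aiohttp

    async def download(url: str, dest: str) -> None:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                with open(dest, 'wb') as fd:
                    async for chunk in resp.content.iter_chunked(64 * 1024):
                        fd.write(chunk)
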
+ + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... + + """ + + total_bytes = 0 + + def __init__(self, protocol: BaseProtocol, + *, limit: int=DEFAULT_LIMIT, + timer: Optional[BaseTimerContext]=None, + loop: Optional[asyncio.AbstractEventLoop]=None) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits = None # type: Optional[List[int]] + self._buffer = collections.deque() # type: Deque[bytes] + self._buffer_offset = 0 + self._eof = False + self._waiter = None # type: Optional[asyncio.Future[bool]] + self._eof_waiter = None # type: Optional[asyncio.Future[bool]] + self._exception = None # type: Optional[BaseException] + self._timer = timer + self._eof_callbacks = [] # type: List[Callable[[], None]] + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append('%d bytes' % self._size) + if self._eof: + info.append('eof') + if self._low_water != DEFAULT_LIMIT: + info.append('low=%d high=%d' % (self._low_water, self._high_water)) + if self._waiter: + info.append('w=%r' % self._waiter) + if self._exception: + info.append('e=%r' % self._exception) + return '<%s>' % ' '.join(info) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + waiter = self._eof_waiter + if waiter is not None: + set_exception(waiter, exc) + self._eof_waiter = None + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception('Exception in eof callback') + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, True) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_result(waiter, True) + + for cb in self._eof_callbacks: + try: + cb() + except Exception: + internal_logger.exception('Exception in eof callback') + + self._eof_callbacks.clear() + + def is_eof(self) -> bool: + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + async def wait_eof(self) -> None: + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = self._loop.create_future() + try: + await self._eof_waiter + finally: + self._eof_waiter = None + + def unread_data(self, data: bytes) -> None: + """ rollback reading some data from stream, inserting it to buffer head. 
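
Aside: the feed_data()/feed_eof() half of this interface is normally driven by the HTTP protocol code; a sketch of exercising it directly, with a mock standing in for the BaseProtocol flow-control hooks and matching the constructor shown above:

    import asyncio
    from unittest import mock

    from aiohttp.streams import StreamReader

    async def demo() -> None:
        protocol = mock.Mock()
        protocol._reading_paused = False
        reader = StreamReader(protocol, loop=asyncio.get_event_loop())
        reader.feed_data(b'first line\nsecond')
        reader.feed_eof()
        assert await reader.readline() == b'first line\n'
        assert await reader.read() == b'second'
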
+ """ + warnings.warn("unread_data() is deprecated " + "and will be removed in future releases (#3260)", + DeprecationWarning, + stacklevel=2) + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset:] + self._buffer_offset = 0 + self._size += len(data) + self._cursor -= len(data) + self._buffer.appendleft(data) + self._eof_counter = 0 + + # TODO: size is ignored, remove the param later + def feed_data(self, data: bytes, size: int=0) -> None: + assert not self._eof, 'feed_data after feed_eof' + + if not data: + return + + self._size += len(data) + self._buffer.append(data) + self.total_bytes += len(data) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, False) + + if (self._size > self._high_water and + not self._protocol._reading_paused): + self._protocol.pause_reading() + + def begin_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + if self.total_bytes: + raise RuntimeError("Called begin_http_chunk_receiving when" + "some data was already fed") + self._http_chunk_splits = [] + + def end_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + raise RuntimeError("Called end_chunk_receiving without calling " + "begin_chunk_receiving first") + + # self._http_chunk_splits contains logical byte offsets from start of + # the body transfer. Each offset is the offset of the end of a chunk. + # "Logical" means bytes, accessible for a user. + # If no chunks containig logical data were received, current position + # is difinitely zero. + pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 + + if self.total_bytes == pos: + # We should not add empty chunks here. So we check for that. + # Note, when chunked + gzip is used, we can receive a chunk + # of compressed data, but that data may not be enough for gzip FSM + # to yield any uncompressed data. That's why current position may + # not change after receiving a chunk. + return + + self._http_chunk_splits.append(self.total_bytes) + + # wake up readchunk when end of http chunk received + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, False) + + async def _wait(self, func_name: str) -> None: + # StreamReader uses a future to link the protocol feed_data() method + # to a read coroutine. Running two read coroutines at the same time + # would have an unexpected behaviour. It would not possible to know + # which coroutine would get the next data. + if self._waiter is not None: + raise RuntimeError('%s() called while another coroutine is ' + 'already waiting for incoming data' % func_name) + + waiter = self._waiter = self._loop.create_future() + try: + if self._timer: + with self._timer: + await waiter + else: + await waiter + finally: + self._waiter = None + + async def readline(self) -> bytes: + if self._exception is not None: + raise self._exception + + line = [] + line_size = 0 + not_enough = True + + while not_enough: + while self._buffer and not_enough: + offset = self._buffer_offset + ichar = self._buffer[0].find(b'\n', offset) + 1 + # Read from current offset to found b'\n' or to the end. 
+ data = self._read_nowait_chunk(ichar - offset if ichar else -1) + line.append(data) + line_size += len(data) + if ichar: + not_enough = False + + if line_size > self._high_water: + raise ValueError('Line is too long') + + if self._eof: + break + + if not_enough: + await self._wait('readline') + + return b''.join(line) + + async def read(self, n: int=-1) -> bytes: + if self._exception is not None: + raise self._exception + + # migration problem; with DataQueue you have to catch + # EofStream exception, so common way is to run payload.read() inside + # infinite loop. what can cause real infinite loop with StreamReader + # lets keep this code one major release. + if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, '_eof_counter', 0) + 1 + if self._eof_counter > 5: + internal_logger.warning( + 'Multiple access to StreamReader in eof state, ' + 'might be infinite loop.', stack_info=True) + + if not n: + return b'' + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = await self.readany() + if not block: + break + blocks.append(block) + return b''.join(blocks) + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait('read') + + return self._read_nowait(n) + + async def readany(self) -> bytes: + if self._exception is not None: + raise self._exception + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait('readany') + + return self._read_nowait(-1) + + async def readchunk(self) -> Tuple[bytes, bool]: + """Returns a tuple of (data, end_of_http_chunk). When chunked transfer + encoding is used, end_of_http_chunk is a boolean indicating if the end + of the data corresponds to the end of a HTTP chunk , otherwise it is + always False. 
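
Aside: readchunk() and the iter_chunks() helper expose the HTTP chunk boundaries recorded by begin/end_http_chunk_receiving above; a client-side sketch (URL arbitrary):

    import aiohttp

    async def read_in_http_chunks(url: str) -> bytes:
        body = bytearray()
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                async for data, end_of_http_chunk in resp.content.iter_chunks():
                    body.extend(data)    # end_of_http_chunk marks chunked-TE boundaries
        return bytes(body)
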
+ """ + if self._exception is not None: + raise self._exception + + if not self._buffer and not self._eof: + if (self._http_chunk_splits and + self._cursor == self._http_chunk_splits[0]): + # end of http chunk without available data + self._http_chunk_splits = self._http_chunk_splits[1:] + return (b"", True) + await self._wait('readchunk') + + if not self._buffer and not self._http_chunk_splits: + # end of file + return (b"", False) + elif self._http_chunk_splits is not None: + while self._http_chunk_splits: + pos = self._http_chunk_splits[0] + self._http_chunk_splits = self._http_chunk_splits[1:] + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: + return (self._read_nowait(pos-self._cursor), True) + return (self._read_nowait(-1), False) + else: + return (self._read_nowait_chunk(-1), False) + + async def readexactly(self, n: int) -> bytes: + if self._exception is not None: + raise self._exception + + blocks = [] # type: List[bytes] + while n > 0: + block = await self.read(n) + if not block: + partial = b''.join(blocks) + raise asyncio.streams.IncompleteReadError( + partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b''.join(blocks) + + def read_nowait(self, n: int=-1) -> bytes: + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. + if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + 'Called while some coroutine is waiting for incoming data.') + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset:offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._size -= len(data) + self._cursor += len(data) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + chunks = [] + + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b''.join(chunks) if chunks else b'' + + +class EmptyStreamReader(AsyncStreamReaderMixin): + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception(self, exc: BaseException) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception('Exception in eof callback') + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int=0) -> None: + pass + + async def readline(self) -> bytes: + return b'' + + async def read(self, n: int=-1) -> bytes: + return b'' + + async def readany(self) -> bytes: + return b'' + + async def readchunk(self) -> Tuple[bytes, bool]: + return (b'', True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.streams.IncompleteReadError(b'', n) + + def read_nowait(self) -> bytes: + return b'' + + +EMPTY_PAYLOAD = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: 
asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter = None # type: Optional[asyncio.Future[bool]] + self._exception = None # type: Optional[BaseException] + self._size = 0 + self._buffer = collections.deque() # type: Deque[Tuple[_T, int]] + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._eof = True + self._exception = exc + + waiter = self._waiter + if waiter is not None: + set_exception(waiter, exc) + self._waiter = None + + def feed_data(self, data: _T, size: int=0) -> None: + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, True) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, False) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data.""" + + def __init__(self, protocol: BaseProtocol, *, + limit: int=DEFAULT_LIMIT, + loop: asyncio.AbstractEventLoop) -> None: + super().__init__(loop=loop) + + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int=0) -> None: + super().feed_data(data, size) + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + try: + return await super().read() + finally: + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() diff --git a/venv/lib/python3.7/site-packages/aiohttp/tcp_helpers.py b/venv/lib/python3.7/site-packages/aiohttp/tcp_helpers.py new file mode 100644 index 0000000..440c116 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/tcp_helpers.py @@ -0,0 +1,63 @@ +"""Helper methods to tune a TCP connection""" + +import asyncio +import socket +from contextlib import suppress +from typing import Optional # noqa + +__all__ = ('tcp_keepalive', 'tcp_nodelay', 'tcp_cork') + + +if hasattr(socket, 'TCP_CORK'): # pragma: no cover + CORK = socket.TCP_CORK # type: Optional[int] +elif hasattr(socket, 'TCP_NOPUSH'): # pragma: no cover + CORK = socket.TCP_NOPUSH # type: ignore +else: # pragma: no cover + CORK = None + + +if hasattr(socket, 'SO_KEEPALIVE'): + def tcp_keepalive(transport: asyncio.Transport) -> None: + sock = transport.get_extra_info('socket') + if sock is not None: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) +else: + def tcp_keepalive( + transport: asyncio.Transport) -> None: # pragma: no cover + pass + + +def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info('socket') + + if sock is None: + 
return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + # socket may be closed already, on windows OSError get raised + with suppress(OSError): + sock.setsockopt( + socket.IPPROTO_TCP, socket.TCP_NODELAY, value) + + +def tcp_cork(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info('socket') + + if CORK is None: + return + + if sock is None: + return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + with suppress(OSError): + sock.setsockopt( + socket.IPPROTO_TCP, CORK, value) diff --git a/venv/lib/python3.7/site-packages/aiohttp/test_utils.py b/venv/lib/python3.7/site-packages/aiohttp/test_utils.py new file mode 100644 index 0000000..d1db1a7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/test_utils.py @@ -0,0 +1,659 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import functools +import gc +import socket +import sys +import unittest +from abc import ABC, abstractmethod +from types import TracebackType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Callable, + Iterator, + List, + Optional, + Type, + Union, +) +from unittest import mock + +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import ( + ClientResponse, + _RequestContextManager, + _WSRequestContextManager, +) + +from . import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse # noqa +from .client_ws import ClientWebSocketResponse # noqa +from .helpers import sentinel +from .http import HttpVersion, RawRequestMessage +from .signals import Signal +from .web import ( + Application, + AppRunner, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: # pragma: no cover + from ssl import SSLContext +else: + SSLContext = None + + +def get_unused_port_socket(host: str) -> socket.socket: + return get_port_socket(host, 0) + + +def get_port_socket(host: str, port: int) -> socket.socket: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('127.0.0.1', 0)) + return s.getsockname()[1] + + +class BaseTestServer(ABC): + def __init__(self, + *, + scheme: Union[str, object]=sentinel, + loop: Optional[asyncio.AbstractEventLoop]=None, + host: str='127.0.0.1', + port: Optional[int]=None, + skip_url_asserts: bool=False, + **kwargs: Any) -> None: + self._loop = loop + self.runner = None # type: Optional[BaseRunner] + self._root = None # type: Optional[URL] + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + + async def start_server(self, + loop: Optional[asyncio.AbstractEventLoop]=None, + **kwargs: Any) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop('ssl', None) + self.runner = await self._make_runner(**kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + _sock = get_port_socket(self.host, self.port) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets + assert sockets is not None + 
self.port = sockets[0].getsockname()[1] + if self.scheme is sentinel: + if self._ssl: + scheme = 'https' + else: + scheme = 'http' + self.scheme = scheme + self._root = URL('{}://{}:{}'.format(self.scheme, + self.host, + self.port)) + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: str) -> URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.is_absolute() + return self._root.join(url) + else: + return URL(str(self._root) + path) + + @property + def started(self) -> bool: + return self.runner is not None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. + + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__(self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType]) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> 'BaseTestServer': + await self.start_server(loop=self._loop) + return self + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType]) -> None: + await self.close() + + +class TestServer(BaseTestServer): + + def __init__(self, app: Application, *, + scheme: Union[str, object]=sentinel, + host: str='127.0.0.1', + port: Optional[int]=None, + **kwargs: Any): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + + def __init__(self, handler: _RequestHandler, *, + scheme: Union[str, object]=sentinel, + host: str='127.0.0.1', + port: Optional[int]=None, + **kwargs: Any) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, + debug: bool=True, + **kwargs: Any) -> ServerRunner: + srv = Server( + self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient: + """ + A test client implementation. + + To write functional tests for aiohttp based servers. 
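
Aside: outside the pytest fixtures, TestServer and TestClient above can be driven directly as async context managers; a sketch with an illustrative handler and route:

    from aiohttp import web
    from aiohttp.test_utils import TestClient, TestServer

    async def handler(request):
        return web.Response(text='ok')

    async def smoke_test() -> None:
        app = web.Application()
        app.router.add_get('/', handler)
        async with TestClient(TestServer(app)) as client:
            resp = await client.get('/')
            assert resp.status == 200
            assert await resp.text() == 'ok'
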
+ + """ + + def __init__(self, server: BaseTestServer, *, + cookie_jar: Optional[AbstractCookieJar]=None, + loop: Optional[asyncio.AbstractEventLoop]=None, + **kwargs: Any) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError("server must be TestServer " + "instance, found type: %r" % type(server)) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, + cookie_jar=cookie_jar, + **kwargs) + self._closed = False + self._responses = [] # type: List[ClientResponse] + self._websockets = [] # type: List[ClientWebSocketResponse] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> Application: + return getattr(self._server, "app", None) + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path: str) -> URL: + return self._server.make_url(path) + + async def request(self, method: str, path: str, + **kwargs: Any) -> ClientResponse: + """Routes a request to tested http server. + + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. + + """ + resp = await self._session.request( + method, self.make_url(path), **kwargs + ) + # save it to close later + self._responses.append(resp) + return resp + + def get(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager( + self.request(hdrs.METH_GET, path, **kwargs) + ) + + def post(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager( + self.request(hdrs.METH_POST, path, **kwargs) + ) + + def options(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager( + self.request(hdrs.METH_OPTIONS, path, **kwargs) + ) + + def head(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager( + self.request(hdrs.METH_HEAD, path, **kwargs) + ) + + def put(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager( + self.request(hdrs.METH_PUT, path, **kwargs) + ) + + def patch(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self.request(hdrs.METH_PATCH, path, **kwargs) + ) + + def delete(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self.request(hdrs.METH_DELETE, path, **kwargs) + ) + + def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager: + """Initiate websocket connection. + + The api corresponds to aiohttp.ClientSession.ws_connect. 
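
Aside: a round trip through ws_connect() above, using the plugin's aiohttp_client fixture; the echo handler and route are illustrative:

    from aiohttp import web

    async def echo(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            await ws.send_str(msg.data)
        return ws

    async def test_echo(aiohttp_client):
        app = web.Application()
        app.router.add_get('/ws', echo)
        client = await aiohttp_client(app)
        ws = await client.ws_connect('/ws')
        await ws.send_str('hello')
        msg = await ws.receive()
        assert msg.data == 'hello'
        await ws.close()
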
+ + """ + return _WSRequestContextManager( + self._ws_connect(path, **kwargs) + ) + + async def _ws_connect(self, path: str, + **kwargs: Any) -> ClientWebSocketResponse: + ws = await self._session.ws_connect( + self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__(self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType]) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> 'TestClient': + await self.start_server() + return self + + async def __aexit__(self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType]) -> None: + await self.close() + + +class AioHTTPTestCase(unittest.TestCase): + """A base class to allow for unittest web applications using + aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. + * self.app (aiohttp.web.Application): the application returned by + self.get_application() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + async def get_application(self) -> Application: + """ + This method should be overridden + to return the aiohttp.web.Application + object to test. + + """ + return self.get_app() + + def get_app(self) -> Application: + """Obsolete method used to constructing web application. + + Use .get_application() coroutine instead + + """ + raise RuntimeError("Did you forget to define get_application()?") + + def setUp(self) -> None: + self.loop = setup_test_loop() + + self.app = self.loop.run_until_complete(self.get_application()) + self.server = self.loop.run_until_complete(self.get_server(self.app)) + self.client = self.loop.run_until_complete( + self.get_client(self.server)) + + self.loop.run_until_complete(self.client.start_server()) + + self.loop.run_until_complete(self.setUpAsync()) + + async def setUpAsync(self) -> None: + pass + + def tearDown(self) -> None: + self.loop.run_until_complete(self.tearDownAsync()) + self.loop.run_until_complete(self.client.close()) + teardown_test_loop(self.loop) + + async def tearDownAsync(self) -> None: + pass + + async def get_server(self, app: Application) -> TestServer: + """Return a TestServer instance.""" + return TestServer(app, loop=self.loop) + + async def get_client(self, server: TestServer) -> TestClient: + """Return a TestClient instance.""" + return TestClient(server, loop=self.loop) + + +def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: + """A decorator dedicated to use with asynchronous methods of an + AioHTTPTestCase. + + Handles executing an asynchronous function, using + the self.loop of the AioHTTPTestCase. 
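
Aside: put together, a hypothetical unittest-style test case built on AioHTTPTestCase and the decorator above:

    from aiohttp import web
    from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop

    class HelloTestCase(AioHTTPTestCase):

        async def get_application(self):
            async def hello(request):
                return web.Response(text='Hello, world')

            app = web.Application()
            app.router.add_get('/', hello)
            return app

        @unittest_run_loop
        async def test_hello(self):
            resp = await self.client.get('/')
            self.assertEqual(resp.status, 200)
            self.assertEqual(await resp.text(), 'Hello, world')
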
+ """ + + @functools.wraps(func, *args, **kwargs) + def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any: + return self.loop.run_until_complete( + func(self, *inner_args, **inner_kwargs)) + + return new_func + + +_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] + + +@contextlib.contextmanager +def loop_context(loop_factory: _LOOP_FACTORY=asyncio.new_event_loop, + fast: bool=False) -> Iterator[asyncio.AbstractEventLoop]: + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop, fast=fast) + + +def setup_test_loop( + loop_factory: _LOOP_FACTORY=asyncio.new_event_loop +) -> asyncio.AbstractEventLoop: + """Create and return an asyncio.BaseEventLoop + instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. + """ + loop = loop_factory() + try: + module = loop.__class__.__module__ + skip_watcher = 'uvloop' in module + except AttributeError: # pragma: no cover + # Just in case + skip_watcher = True + asyncio.set_event_loop(loop) + if sys.platform != "win32" and not skip_watcher: + policy = asyncio.get_event_loop_policy() + watcher = asyncio.SafeChildWatcher() # type: ignore + watcher.attach_loop(loop) + with contextlib.suppress(NotImplementedError): + policy.set_child_watcher(watcher) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, + fast: bool=False) -> None: + """Teardown and cleanup an event_loop created + by setup_test_loop. + + """ + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock() + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext]=None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == 'sslcontext': + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request(method: str, path: str, + headers: Any=None, *, + match_info: Any=sentinel, + version: HttpVersion=HttpVersion(1, 1), + closing: bool=False, + app: Any=None, + writer: Any=sentinel, + protocol: Any=sentinel, + transport: Any=sentinel, + payload: Any=sentinel, + sslcontext: Optional[SSLContext]=None, + client_max_size: int=1024**2, + loop: Any=...) -> Any: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. 
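
Aside: make_mocked_request() lets a handler be unit-tested without spinning up a server; a sketch with a stand-in handler and an arbitrary header name:

    import asyncio

    from aiohttp import web
    from aiohttp.test_utils import make_mocked_request

    async def handler(request):
        assert request.method == 'GET'
        return web.Response(text=request.headers.get('token', ''))

    def test_handler():
        req = make_mocked_request('GET', '/', headers={'token': 'x'})
        loop = asyncio.new_event_loop()
        try:
            resp = loop.run_until_complete(handler(req))
        finally:
            loop.close()
        assert resp.text == 'x'
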
+ + """ + + task = mock.Mock() + if loop is ...: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower() + + message = RawRequestMessage( + method, path, version, headers, + raw_hdrs, closing, False, False, chunked, URL(path)) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + if payload is sentinel: + payload = mock.Mock() + + req = Request(message, payload, + protocol, writer, task, loop, + client_max_size=client_max_size) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock()) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro(return_value: Any=sentinel, + raise_exception: Any=sentinel) -> Any: + """Creates a coroutine mock.""" + @asyncio.coroutine + def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + return return_value + + return mock.Mock(wraps=mock_coro) diff --git a/venv/lib/python3.7/site-packages/aiohttp/tracing.py b/venv/lib/python3.7/site-packages/aiohttp/tracing.py new file mode 100644 index 0000000..84c10ed --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/tracing.py @@ -0,0 +1,369 @@ +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Callable, Type + +import attr +from multidict import CIMultiDict # noqa +from yarl import URL + +from .client_reqrep import ClientResponse +from .signals import Signal + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession # noqa + + _Signal = Signal[Callable[['TraceConfig'], Awaitable[None]]] +else: + _Signal = Signal + + +__all__ = ( + 'TraceConfig', 'TraceRequestStartParams', 'TraceRequestEndParams', + 'TraceRequestExceptionParams', 'TraceConnectionQueuedStartParams', + 'TraceConnectionQueuedEndParams', 'TraceConnectionCreateStartParams', + 'TraceConnectionCreateEndParams', 'TraceConnectionReuseconnParams', + 'TraceDnsResolveHostStartParams', 'TraceDnsResolveHostEndParams', + 'TraceDnsCacheHitParams', 'TraceDnsCacheMissParams', + 'TraceRequestRedirectParams', + 'TraceRequestChunkSentParams', 'TraceResponseChunkReceivedParams', +) + + +class TraceConfig: + """First-class used to trace requests launched via ClientSession + objects.""" + + def __init__( + self, + trace_config_ctx_factory: Type[SimpleNamespace]=SimpleNamespace + ) -> None: + self._on_request_start = Signal(self) # type: _Signal + self._on_request_chunk_sent = Signal(self) # type: _Signal + self._on_response_chunk_received = Signal(self) # type: _Signal + self._on_request_end = Signal(self) # type: _Signal + self._on_request_exception = Signal(self) # type: _Signal + self._on_request_redirect = Signal(self) # type: _Signal + self._on_connection_queued_start 
= Signal(self) # type: _Signal + self._on_connection_queued_end = Signal(self) # type: _Signal + self._on_connection_create_start = Signal(self) # type: _Signal + self._on_connection_create_end = Signal(self) # type: _Signal + self._on_connection_reuseconn = Signal(self) # type: _Signal + self._on_dns_resolvehost_start = Signal(self) # type: _Signal + self._on_dns_resolvehost_end = Signal(self) # type: _Signal + self._on_dns_cache_hit = Signal(self) # type: _Signal + self._on_dns_cache_miss = Signal(self) # type: _Signal + + self._trace_config_ctx_factory = trace_config_ctx_factory # type: Type[SimpleNamespace] # noqa + + def trace_config_ctx( + self, + trace_request_ctx: SimpleNamespace=None + ) -> SimpleNamespace: # noqa + """ Return a new trace_config_ctx instance """ + return self._trace_config_ctx_factory( + trace_request_ctx=trace_request_ctx) + + def freeze(self) -> None: + self._on_request_start.freeze() + self._on_request_chunk_sent.freeze() + self._on_response_chunk_received.freeze() + self._on_request_end.freeze() + self._on_request_exception.freeze() + self._on_request_redirect.freeze() + self._on_connection_queued_start.freeze() + self._on_connection_queued_end.freeze() + self._on_connection_create_start.freeze() + self._on_connection_create_end.freeze() + self._on_connection_reuseconn.freeze() + self._on_dns_resolvehost_start.freeze() + self._on_dns_resolvehost_end.freeze() + self._on_dns_cache_hit.freeze() + self._on_dns_cache_miss.freeze() + + @property + def on_request_start(self) -> _Signal: + return self._on_request_start + + @property + def on_request_chunk_sent(self) -> _Signal: + return self._on_request_chunk_sent + + @property + def on_response_chunk_received(self) -> _Signal: + return self._on_response_chunk_received + + @property + def on_request_end(self) -> _Signal: + return self._on_request_end + + @property + def on_request_exception(self) -> _Signal: + return self._on_request_exception + + @property + def on_request_redirect(self) -> _Signal: + return self._on_request_redirect + + @property + def on_connection_queued_start(self) -> _Signal: + return self._on_connection_queued_start + + @property + def on_connection_queued_end(self) -> _Signal: + return self._on_connection_queued_end + + @property + def on_connection_create_start(self) -> _Signal: + return self._on_connection_create_start + + @property + def on_connection_create_end(self) -> _Signal: + return self._on_connection_create_end + + @property + def on_connection_reuseconn(self) -> _Signal: + return self._on_connection_reuseconn + + @property + def on_dns_resolvehost_start(self) -> _Signal: + return self._on_dns_resolvehost_start + + @property + def on_dns_resolvehost_end(self) -> _Signal: + return self._on_dns_resolvehost_end + + @property + def on_dns_cache_hit(self) -> _Signal: + return self._on_dns_cache_hit + + @property + def on_dns_cache_miss(self) -> _Signal: + return self._on_dns_cache_miss + + +@attr.s(frozen=True, slots=True) +class TraceRequestStartParams: + """ Parameters sent by the `on_request_start` signal""" + method = attr.ib(type=str) + url = attr.ib(type=URL) + headers = attr.ib(type='CIMultiDict[str]') + + +@attr.s(frozen=True, slots=True) +class TraceRequestChunkSentParams: + """ Parameters sent by the `on_request_chunk_sent` signal""" + chunk = attr.ib(type=bytes) + + +@attr.s(frozen=True, slots=True) +class TraceResponseChunkReceivedParams: + """ Parameters sent by the `on_response_chunk_received` signal""" + chunk = attr.ib(type=bytes) + + +@attr.s(frozen=True, slots=True) 
+class TraceRequestEndParams: + """ Parameters sent by the `on_request_end` signal""" + method = attr.ib(type=str) + url = attr.ib(type=URL) + headers = attr.ib(type='CIMultiDict[str]') + response = attr.ib(type=ClientResponse) + + +@attr.s(frozen=True, slots=True) +class TraceRequestExceptionParams: + """ Parameters sent by the `on_request_exception` signal""" + method = attr.ib(type=str) + url = attr.ib(type=URL) + headers = attr.ib(type='CIMultiDict[str]') + exception = attr.ib(type=BaseException) + + +@attr.s(frozen=True, slots=True) +class TraceRequestRedirectParams: + """ Parameters sent by the `on_request_redirect` signal""" + method = attr.ib(type=str) + url = attr.ib(type=URL) + headers = attr.ib(type='CIMultiDict[str]') + response = attr.ib(type=ClientResponse) + + +@attr.s(frozen=True, slots=True) +class TraceConnectionQueuedStartParams: + """ Parameters sent by the `on_connection_queued_start` signal""" + + +@attr.s(frozen=True, slots=True) +class TraceConnectionQueuedEndParams: + """ Parameters sent by the `on_connection_queued_end` signal""" + + +@attr.s(frozen=True, slots=True) +class TraceConnectionCreateStartParams: + """ Parameters sent by the `on_connection_create_start` signal""" + + +@attr.s(frozen=True, slots=True) +class TraceConnectionCreateEndParams: + """ Parameters sent by the `on_connection_create_end` signal""" + + +@attr.s(frozen=True, slots=True) +class TraceConnectionReuseconnParams: + """ Parameters sent by the `on_connection_reuseconn` signal""" + + +@attr.s(frozen=True, slots=True) +class TraceDnsResolveHostStartParams: + """ Parameters sent by the `on_dns_resolvehost_start` signal""" + host = attr.ib(type=str) + + +@attr.s(frozen=True, slots=True) +class TraceDnsResolveHostEndParams: + """ Parameters sent by the `on_dns_resolvehost_end` signal""" + host = attr.ib(type=str) + + +@attr.s(frozen=True, slots=True) +class TraceDnsCacheHitParams: + """ Parameters sent by the `on_dns_cache_hit` signal""" + host = attr.ib(type=str) + + +@attr.s(frozen=True, slots=True) +class TraceDnsCacheMissParams: + """ Parameters sent by the `on_dns_cache_miss` signal""" + host = attr.ib(type=str) + + +class Trace: + """ Internal class used to keep together the main dependencies used + at the moment of send a signal.""" + + def __init__(self, + session: 'ClientSession', + trace_config: TraceConfig, + trace_config_ctx: SimpleNamespace) -> None: + self._trace_config = trace_config + self._trace_config_ctx = trace_config_ctx + self._session = session + + async def send_request_start(self, + method: str, + url: URL, + headers: 'CIMultiDict[str]') -> None: + return await self._trace_config.on_request_start.send( + self._session, + self._trace_config_ctx, + TraceRequestStartParams(method, url, headers) + ) + + async def send_request_chunk_sent(self, chunk: bytes) -> None: + return await self._trace_config.on_request_chunk_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestChunkSentParams(chunk) + ) + + async def send_response_chunk_received(self, chunk: bytes) -> None: + return await self._trace_config.on_response_chunk_received.send( + self._session, + self._trace_config_ctx, + TraceResponseChunkReceivedParams(chunk) + ) + + async def send_request_end(self, + method: str, + url: URL, + headers: 'CIMultiDict[str]', + response: ClientResponse) -> None: + return await self._trace_config.on_request_end.send( + self._session, + self._trace_config_ctx, + TraceRequestEndParams(method, url, headers, response) + ) + + async def send_request_exception(self, + method: str, 
+ url: URL, + headers: 'CIMultiDict[str]', + exception: BaseException) -> None: + return await self._trace_config.on_request_exception.send( + self._session, + self._trace_config_ctx, + TraceRequestExceptionParams(method, url, headers, exception) + ) + + async def send_request_redirect(self, + method: str, + url: URL, + headers: 'CIMultiDict[str]', + response: ClientResponse) -> None: + return await self._trace_config._on_request_redirect.send( + self._session, + self._trace_config_ctx, + TraceRequestRedirectParams(method, url, headers, response) + ) + + async def send_connection_queued_start(self) -> None: + return await self._trace_config.on_connection_queued_start.send( + self._session, + self._trace_config_ctx, + TraceConnectionQueuedStartParams() + ) + + async def send_connection_queued_end(self) -> None: + return await self._trace_config.on_connection_queued_end.send( + self._session, + self._trace_config_ctx, + TraceConnectionQueuedEndParams() + ) + + async def send_connection_create_start(self) -> None: + return await self._trace_config.on_connection_create_start.send( + self._session, + self._trace_config_ctx, + TraceConnectionCreateStartParams() + ) + + async def send_connection_create_end(self) -> None: + return await self._trace_config.on_connection_create_end.send( + self._session, + self._trace_config_ctx, + TraceConnectionCreateEndParams() + ) + + async def send_connection_reuseconn(self) -> None: + return await self._trace_config.on_connection_reuseconn.send( + self._session, + self._trace_config_ctx, + TraceConnectionReuseconnParams() + ) + + async def send_dns_resolvehost_start(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_start.send( + self._session, + self._trace_config_ctx, + TraceDnsResolveHostStartParams(host) + ) + + async def send_dns_resolvehost_end(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_end.send( + self._session, + self._trace_config_ctx, + TraceDnsResolveHostEndParams(host) + ) + + async def send_dns_cache_hit(self, host: str) -> None: + return await self._trace_config.on_dns_cache_hit.send( + self._session, + self._trace_config_ctx, + TraceDnsCacheHitParams(host) + ) + + async def send_dns_cache_miss(self, host: str) -> None: + return await self._trace_config.on_dns_cache_miss.send( + self._session, + self._trace_config_ctx, + TraceDnsCacheMissParams(host) + ) diff --git a/venv/lib/python3.7/site-packages/aiohttp/typedefs.py b/venv/lib/python3.7/site-packages/aiohttp/typedefs.py new file mode 100644 index 0000000..d7c97a4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/typedefs.py @@ -0,0 +1,46 @@ +import json +import os # noqa +import pathlib # noqa +import sys +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + Mapping, + Tuple, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +DEFAULT_JSON_ENCODER = json.dumps +DEFAULT_JSON_DECODER = json.loads + +if TYPE_CHECKING: # pragma: no cover + _CIMultiDict = CIMultiDict[str] + _CIMultiDictProxy = CIMultiDictProxy[str] + _MultiDict = MultiDict[str] + _MultiDictProxy = MultiDictProxy[str] + from http.cookies import BaseCookie # noqa +else: + _CIMultiDict = CIMultiDict + _CIMultiDictProxy = CIMultiDictProxy + _MultiDict = MultiDict + _MultiDictProxy = MultiDictProxy + +Byteish = Union[bytes, bytearray, memoryview] +JSONEncoder = Callable[[Any], str] +JSONDecoder = Callable[[str], Any] +LooseHeaders = Union[Mapping[str, str], _CIMultiDict, 
_CIMultiDictProxy] +RawHeaders = Tuple[Tuple[bytes, bytes], ...] +StrOrURL = Union[str, URL] +LooseCookies = Union[Iterable[Tuple[str, 'BaseCookie[str]']], + Mapping[str, 'BaseCookie[str]'], 'BaseCookie[str]'] + + +if sys.version_info >= (3, 6): + PathLike = Union[str, 'os.PathLike[str]'] +else: + PathLike = Union[str, pathlib.PurePath] diff --git a/venv/lib/python3.7/site-packages/aiohttp/web.py b/venv/lib/python3.7/site-packages/aiohttp/web.py new file mode 100644 index 0000000..fa66658 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web.py @@ -0,0 +1,484 @@ +import asyncio +import logging +import socket +import sys +from argparse import ArgumentParser +from collections.abc import Iterable +from importlib import import_module +from typing import Any, Awaitable, Callable, List, Optional, Type, Union, cast + +from .abc import AbstractAccessLogger +from .helpers import all_tasks +from .log import access_logger +from .web_app import Application, CleanupError +from .web_exceptions import ( + HTTPAccepted, + HTTPBadGateway, + HTTPBadRequest, + HTTPClientError, + HTTPConflict, + HTTPCreated, + HTTPError, + HTTPException, + HTTPExpectationFailed, + HTTPFailedDependency, + HTTPForbidden, + HTTPFound, + HTTPGatewayTimeout, + HTTPGone, + HTTPInsufficientStorage, + HTTPInternalServerError, + HTTPLengthRequired, + HTTPMethodNotAllowed, + HTTPMisdirectedRequest, + HTTPMovedPermanently, + HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired, + HTTPNoContent, + HTTPNonAuthoritativeInformation, + HTTPNotAcceptable, + HTTPNotExtended, + HTTPNotFound, + HTTPNotImplemented, + HTTPNotModified, + HTTPOk, + HTTPPartialContent, + HTTPPaymentRequired, + HTTPPermanentRedirect, + HTTPPreconditionFailed, + HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired, + HTTPRedirection, + HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout, + HTTPRequestURITooLong, + HTTPResetContent, + HTTPSeeOther, + HTTPServerError, + HTTPServiceUnavailable, + HTTPSuccessful, + HTTPTemporaryRedirect, + HTTPTooManyRequests, + HTTPUnauthorized, + HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity, + HTTPUnsupportedMediaType, + HTTPUpgradeRequired, + HTTPUseProxy, + HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported, +) +from .web_fileresponse import FileResponse +from .web_log import AccessLogger +from .web_middlewares import middleware, normalize_path_middleware +from .web_protocol import ( + PayloadAccessError, + RequestHandler, + RequestPayloadError, +) +from .web_request import BaseRequest, FileField, Request +from .web_response import ( + ContentCoding, + Response, + StreamResponse, + json_response, +) +from .web_routedef import ( + AbstractRouteDef, + RouteDef, + RouteTableDef, + StaticDef, + delete, + get, + head, + options, + patch, + post, + put, + route, + static, + view, +) +from .web_runner import ( + AppRunner, + BaseRunner, + BaseSite, + GracefulExit, + ServerRunner, + SockSite, + TCPSite, + UnixSite, +) +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + AbstractRoute, + DynamicResource, + PlainResource, + Resource, + ResourceRoute, + StaticResource, + UrlDispatcher, + UrlMappingMatchInfo, + View, +) +from .web_ws import WebSocketReady, WebSocketResponse, WSMsgType + +__all__ = ( + # web_app + 'Application', + 'CleanupError', + # web_exceptions + 'HTTPAccepted', + 'HTTPBadGateway', + 'HTTPBadRequest', + 'HTTPClientError', + 'HTTPConflict', + 'HTTPCreated', + 'HTTPError', + 'HTTPException', + 
'HTTPExpectationFailed', + 'HTTPFailedDependency', + 'HTTPForbidden', + 'HTTPFound', + 'HTTPGatewayTimeout', + 'HTTPGone', + 'HTTPInsufficientStorage', + 'HTTPInternalServerError', + 'HTTPLengthRequired', + 'HTTPMethodNotAllowed', + 'HTTPMisdirectedRequest', + 'HTTPMovedPermanently', + 'HTTPMultipleChoices', + 'HTTPNetworkAuthenticationRequired', + 'HTTPNoContent', + 'HTTPNonAuthoritativeInformation', + 'HTTPNotAcceptable', + 'HTTPNotExtended', + 'HTTPNotFound', + 'HTTPNotImplemented', + 'HTTPNotModified', + 'HTTPOk', + 'HTTPPartialContent', + 'HTTPPaymentRequired', + 'HTTPPermanentRedirect', + 'HTTPPreconditionFailed', + 'HTTPPreconditionRequired', + 'HTTPProxyAuthenticationRequired', + 'HTTPRedirection', + 'HTTPRequestEntityTooLarge', + 'HTTPRequestHeaderFieldsTooLarge', + 'HTTPRequestRangeNotSatisfiable', + 'HTTPRequestTimeout', + 'HTTPRequestURITooLong', + 'HTTPResetContent', + 'HTTPSeeOther', + 'HTTPServerError', + 'HTTPServiceUnavailable', + 'HTTPSuccessful', + 'HTTPTemporaryRedirect', + 'HTTPTooManyRequests', + 'HTTPUnauthorized', + 'HTTPUnavailableForLegalReasons', + 'HTTPUnprocessableEntity', + 'HTTPUnsupportedMediaType', + 'HTTPUpgradeRequired', + 'HTTPUseProxy', + 'HTTPVariantAlsoNegotiates', + 'HTTPVersionNotSupported', + # web_fileresponse + 'FileResponse', + # web_middlewares + 'middleware', + 'normalize_path_middleware', + # web_protocol + 'PayloadAccessError', + 'RequestHandler', + 'RequestPayloadError', + # web_request + 'BaseRequest', + 'FileField', + 'Request', + # web_response + 'ContentCoding', + 'Response', + 'StreamResponse', + 'json_response', + # web_routedef + 'AbstractRouteDef', + 'RouteDef', + 'RouteTableDef', + 'StaticDef', + 'delete', + 'get', + 'head', + 'options', + 'patch', + 'post', + 'put', + 'route', + 'static', + 'view', + # web_runner + 'AppRunner', + 'BaseRunner', + 'BaseSite', + 'GracefulExit', + 'ServerRunner', + 'SockSite', + 'TCPSite', + 'UnixSite', + # web_server + 'Server', + # web_urldispatcher + 'AbstractResource', + 'AbstractRoute', + 'DynamicResource', + 'PlainResource', + 'Resource', + 'ResourceRoute', + 'StaticResource', + 'UrlDispatcher', + 'UrlMappingMatchInfo', + 'View', + # web_ws + 'WebSocketReady', + 'WebSocketResponse', + 'WSMsgType', + # web + 'run_app', +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore + + +async def _run_app(app: Union[Application, Awaitable[Application]], *, + host: Optional[str]=None, + port: Optional[int]=None, + path: Optional[str]=None, + sock: Optional[socket.socket]=None, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + print: Callable[..., None]=print, + backlog: int=128, + access_log_class: Type[AbstractAccessLogger]=AccessLogger, + access_log_format: str=AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger]=access_logger, + handle_signals: bool=True, + reuse_address: Optional[bool]=None, + reuse_port: Optional[bool]=None) -> None: + # A internal functio to actually do all dirty job for application running + if asyncio.iscoroutine(app): + app = await app # type: ignore + + app = cast(Application, app) + + runner = AppRunner(app, handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log) + + await runner.setup() + + sites = [] # type: List[BaseSite] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append(TCPSite(runner, host, port, + shutdown_timeout=shutdown_timeout, + 
ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port)) + else: + for h in host: + sites.append(TCPSite(runner, h, port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port)) + elif path is None and sock is None or port is not None: + sites.append(TCPSite(runner, port=port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port)) + + if path is not None: + if isinstance(path, (str, bytes, bytearray, memoryview)): + sites.append(UnixSite(runner, path, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog)) + else: + for p in path: + sites.append(UnixSite(runner, p, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog)) + + if sock is not None: + if not isinstance(sock, Iterable): + sites.append(SockSite(runner, sock, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog)) + else: + for s in sock: + sites.append(SockSite(runner, s, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog)) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print("======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(', '.join(names))) + while True: + await asyncio.sleep(3600) # sleep forever by 1 hour intervals + finally: + await runner.cleanup() + + +def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None: + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler({ + 'message': 'unhandled exception during asyncio.run() shutdown', + 'exception': task.exception(), + 'task': task, + }) + + +def run_app(app: Union[Application, Awaitable[Application]], *, + host: Optional[str]=None, + port: Optional[int]=None, + path: Optional[str]=None, + sock: Optional[socket.socket]=None, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + print: Callable[..., None]=print, + backlog: int=128, + access_log_class: Type[AbstractAccessLogger]=AccessLogger, + access_log_format: str=AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger]=access_logger, + handle_signals: bool=True, + reuse_address: Optional[bool]=None, + reuse_port: Optional[bool]=None) -> None: + """Run an app locally""" + loop = asyncio.get_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == 'aiohttp.access': + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + try: + loop.run_until_complete(_run_app(app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port)) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + finally: + _cancel_all_tasks(loop) + if 
sys.version_info >= (3, 6): # don't use PY_36 to pass mypy + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", + prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=("Callable returning the `aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax."), + metavar="entry-func" + ) + arg_parser.add_argument( + "-H", "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost" + ) + arg_parser.add_argument( + "-P", "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080" + ) + arg_parser.add_argument( + "-U", "--path", + help="Unix file system path to serve on. Specifying a path will cause " + "hostname and port arguments to be ignored.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error( + "'entry-func' not in 'module:function' syntax" + ) + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error("unable to import %s: %s" % (mod_str, ex)) + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error("module %r has no attribute %r" % (mod_str, func_str)) + + # Compatibility logic + if args.path is not None and not hasattr(socket, 'AF_UNIX'): + arg_parser.error("file system paths not supported by your operating" + " environment") + + logging.basicConfig(level=logging.DEBUG) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_app.py b/venv/lib/python3.7/site-packages/aiohttp/web_app.py new file mode 100644 index 0000000..afe53f5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_app.py @@ -0,0 +1,511 @@ +import asyncio +import logging +import warnings +from functools import partial +from typing import ( # noqa + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from . 
import hdrs +from .abc import ( + AbstractAccessLogger, + AbstractMatchInfo, + AbstractRouter, + AbstractStreamWriter, +) +from .frozenlist import FrozenList +from .helpers import DEBUG +from .http_parser import RawRequestMessage +from .log import web_logger +from .signals import Signal +from .streams import StreamReader +from .web_log import AccessLogger +from .web_middlewares import _fix_request_current_app +from .web_protocol import RequestHandler +from .web_request import Request +from .web_response import StreamResponse +from .web_routedef import AbstractRouteDef +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + Domain, + MaskDomain, + MatchedSubAppResource, + PrefixedSubAppResource, + UrlDispatcher, +) + +__all__ = ('Application', 'CleanupError') + + +if TYPE_CHECKING: # pragma: no cover + _AppSignal = Signal[Callable[['Application'], Awaitable[None]]] + _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], + Awaitable[None]]] + _Handler = Callable[[Request], Awaitable[StreamResponse]] + _Middleware = Union[Callable[[Request, _Handler], + Awaitable[StreamResponse]], + Callable[['Application', _Handler], # old-style + Awaitable[_Handler]]] + _Middlewares = FrozenList[_Middleware] + _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]] + _Subapps = List['Application'] +else: + # No type checker mode, skip types + _AppSignal = Signal + _RespPrepareSignal = Signal + _Handler = Callable + _Middleware = Callable + _Middlewares = FrozenList + _MiddlewaresHandlers = Optional[Sequence] + _Subapps = List + + +class Application(MutableMapping[str, Any]): + ATTRS = frozenset([ + 'logger', '_debug', '_router', '_loop', '_handler_args', + '_middlewares', '_middlewares_handlers', '_run_middlewares', + '_state', '_frozen', '_pre_frozen', '_subapps', + '_on_response_prepare', '_on_startup', '_on_shutdown', + '_on_cleanup', '_client_max_size', '_cleanup_ctx']) + + def __init__(self, *, + logger: logging.Logger=web_logger, + router: Optional[UrlDispatcher]=None, + middlewares: Sequence[_Middleware]=(), + handler_args: Mapping[str, Any]=None, + client_max_size: int=1024**2, + loop: Optional[asyncio.AbstractEventLoop]=None, + debug: Any=... 
# mypy doesn't support ellipsis + ) -> None: + if router is None: + router = UrlDispatcher() + else: + warnings.warn("router argument is deprecated", DeprecationWarning, + stacklevel=2) + assert isinstance(router, AbstractRouter), router + + if loop is not None: + warnings.warn("loop argument is deprecated", DeprecationWarning, + stacklevel=2) + + if debug is not ...: + warnings.warn("debug argument is deprecated", + DeprecationWarning, + stacklevel=2) + self._debug = debug + self._router = router # type: UrlDispatcher + self._loop = loop + self._handler_args = handler_args + self.logger = logger + + self._middlewares = FrozenList(middlewares) # type: _Middlewares + + # initialized on freezing + self._middlewares_handlers = None # type: _MiddlewaresHandlers + # initialized on freezing + self._run_middlewares = None # type: Optional[bool] + + self._state = {} # type: Dict[str, Any] + self._frozen = False + self._pre_frozen = False + self._subapps = [] # type: _Subapps + + self._on_response_prepare = Signal(self) # type: _RespPrepareSignal + self._on_startup = Signal(self) # type: _AppSignal + self._on_shutdown = Signal(self) # type: _AppSignal + self._on_cleanup = Signal(self) # type: _AppSignal + self._cleanup_ctx = CleanupContext() + self._on_startup.append(self._cleanup_ctx._on_startup) + self._on_cleanup.append(self._cleanup_ctx._on_cleanup) + self._client_max_size = client_max_size + + def __init_subclass__(cls: Type['Application']) -> None: + warnings.warn("Inheritance class {} from web.Application " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2) + + if DEBUG: # pragma: no cover + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn("Setting custom web.Application.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2) + super().__setattr__(name, val) + + # MutableMapping API + + def __eq__(self, other: object) -> bool: + return self is other + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def _check_frozen(self) -> None: + if self._frozen: + warnings.warn("Changing state of started or joined " + "application is deprecated", + DeprecationWarning, + stacklevel=3) + + def __setitem__(self, key: str, value: Any) -> None: + self._check_frozen() + self._state[key] = value + + def __delitem__(self, key: str) -> None: + self._check_frozen() + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + @property + def loop(self) -> asyncio.AbstractEventLoop: + # Technically the loop can be None + # but we mask it by explicit type cast + # to provide more convinient type annotation + warnings.warn("loop property is deprecated", + DeprecationWarning, + stacklevel=2) + return cast(asyncio.AbstractEventLoop, self._loop) + + def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + if loop is None: + loop = asyncio.get_event_loop() + if self._loop is not None and self._loop is not loop: + raise RuntimeError( + "web.Application instance initialized with different loop") + + self._loop = loop + + # set loop debug + if self._debug is ...: + self._debug = loop.get_debug() + + # set loop to sub applications + for subapp in self._subapps: + subapp._set_loop(loop) + + @property + def pre_frozen(self) -> bool: + return self._pre_frozen + + def pre_freeze(self) -> None: + if self._pre_frozen: + return + + self._pre_frozen = True + self._middlewares.freeze() + 
self._router.freeze() + self._on_response_prepare.freeze() + self._cleanup_ctx.freeze() + self._on_startup.freeze() + self._on_shutdown.freeze() + self._on_cleanup.freeze() + self._middlewares_handlers = tuple(self._prepare_middleware()) + + # If current app and any subapp do not have middlewares avoid run all + # of the code footprint that it implies, which have a middleware + # hardcoded per app that sets up the current_app attribute. If no + # middlewares are configured the handler will receive the proper + # current_app without needing all of this code. + self._run_middlewares = True if self.middlewares else False + + for subapp in self._subapps: + subapp.pre_freeze() + self._run_middlewares = (self._run_middlewares or + subapp._run_middlewares) + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + if self._frozen: + return + + self.pre_freeze() + self._frozen = True + for subapp in self._subapps: + subapp.freeze() + + @property + def debug(self) -> bool: + warnings.warn("debug property is deprecated", + DeprecationWarning, + stacklevel=2) + return self._debug + + def _reg_subapp_signals(self, subapp: 'Application') -> None: + + def reg_handler(signame: str) -> None: + subsig = getattr(subapp, signame) + + async def handler(app: 'Application') -> None: + await subsig.send(subapp) + appsig = getattr(self, signame) + appsig.append(handler) + + reg_handler('on_startup') + reg_handler('on_shutdown') + reg_handler('on_cleanup') + + def add_subapp(self, prefix: str, + subapp: 'Application') -> AbstractResource: + if not isinstance(prefix, str): + raise TypeError("Prefix must be str") + prefix = prefix.rstrip('/') + if not prefix: + raise ValueError("Prefix cannot be empty") + factory = partial(PrefixedSubAppResource, prefix, subapp) + return self._add_subapp(factory, subapp) + + def _add_subapp(self, + resource_factory: Callable[[], AbstractResource], + subapp: 'Application') -> AbstractResource: + if self.frozen: + raise RuntimeError( + "Cannot add sub application to frozen application") + if subapp.frozen: + raise RuntimeError("Cannot add frozen application") + resource = resource_factory() + self.router.register_resource(resource) + self._reg_subapp_signals(subapp) + self._subapps.append(subapp) + subapp.pre_freeze() + if self._loop is not None: + subapp._set_loop(self._loop) + return resource + + def add_domain(self, domain: str, + subapp: 'Application') -> AbstractResource: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + elif '*' in domain: + rule = MaskDomain(domain) # type: Domain + else: + rule = Domain(domain) + factory = partial(MatchedSubAppResource, rule, subapp) + return self._add_subapp(factory, subapp) + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: + self.router.add_routes(routes) + + @property + def on_response_prepare(self) -> _RespPrepareSignal: + return self._on_response_prepare + + @property + def on_startup(self) -> _AppSignal: + return self._on_startup + + @property + def on_shutdown(self) -> _AppSignal: + return self._on_shutdown + + @property + def on_cleanup(self) -> _AppSignal: + return self._on_cleanup + + @property + def cleanup_ctx(self) -> 'CleanupContext': + return self._cleanup_ctx + + @property + def router(self) -> UrlDispatcher: + return self._router + + @property + def middlewares(self) -> _Middlewares: + return self._middlewares + + def _make_handler(self, *, + loop: Optional[asyncio.AbstractEventLoop]=None, + access_log_class: Type[ + 
AbstractAccessLogger]=AccessLogger, + **kwargs: Any) -> Server: + + if not issubclass(access_log_class, AbstractAccessLogger): + raise TypeError( + 'access_log_class must be subclass of ' + 'aiohttp.abc.AbstractAccessLogger, got {}'.format( + access_log_class)) + + self._set_loop(loop) + self.freeze() + + kwargs['debug'] = self._debug + kwargs['access_log_class'] = access_log_class + if self._handler_args: + for k, v in self._handler_args.items(): + kwargs[k] = v + + return Server(self._handle, # type: ignore + request_factory=self._make_request, + loop=self._loop, **kwargs) + + def make_handler(self, *, + loop: Optional[asyncio.AbstractEventLoop]=None, + access_log_class: Type[ + AbstractAccessLogger]=AccessLogger, + **kwargs: Any) -> Server: + + warnings.warn("Application.make_handler(...) is deprecated, " + "use AppRunner API instead", + DeprecationWarning, + stacklevel=2) + + return self._make_handler(loop=loop, + access_log_class=access_log_class, + **kwargs) + + async def startup(self) -> None: + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + await self.on_startup.send(self) + + async def shutdown(self) -> None: + """Causes on_shutdown signal + + Should be called before cleanup() + """ + await self.on_shutdown.send(self) + + async def cleanup(self) -> None: + """Causes on_cleanup signal + + Should be called after shutdown() + """ + await self.on_cleanup.send(self) + + def _make_request(self, message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: 'asyncio.Task[None]', + _cls: Type[Request]=Request) -> Request: + return _cls( + message, payload, protocol, writer, task, + self._loop, + client_max_size=self._client_max_size) + + def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]: + for m in reversed(self._middlewares): + if getattr(m, '__middleware_version__', None) == 1: + yield m, True + else: + warnings.warn('old-style middleware "{!r}" deprecated, ' + 'see #2252'.format(m), + DeprecationWarning, stacklevel=2) + yield m, False + + yield _fix_request_current_app(self), True + + async def _handle(self, request: Request) -> StreamResponse: + loop = asyncio.get_event_loop() + debug = loop.get_debug() + match_info = await self._router.resolve(request) + if debug: # pragma: no cover + if not isinstance(match_info, AbstractMatchInfo): + raise TypeError("match_info should be AbstractMatchInfo " + "instance, not {!r}".format(match_info)) + match_info.add_app(self) + + match_info.freeze() + + resp = None + request._match_info = match_info # type: ignore + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = await match_info.expect_handler(request) + await request.writer.drain() + + if resp is None: + handler = match_info.handler + + if self._run_middlewares: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore # noqa + if new_style: + handler = partial(m, handler=handler) + else: + handler = await m(app, handler) # type: ignore + + resp = await handler(request) + + return resp + + def __call__(self) -> 'Application': + """gunicorn compatibility""" + return self + + def __repr__(self) -> str: + return "".format(id(self)) + + +class CleanupError(RuntimeError): + @property + def exceptions(self) -> List[BaseException]: + return self.args[1] + + +if TYPE_CHECKING: # pragma: no cover + _CleanupContextBase = FrozenList[Callable[[Application], + AsyncIterator[None]]] +else: + _CleanupContextBase = 
FrozenList + + +class CleanupContext(_CleanupContextBase): + + def __init__(self) -> None: + super().__init__() + self._exits = [] # type: List[AsyncIterator[None]] + + async def _on_startup(self, app: Application) -> None: + for cb in self: + it = cb(app).__aiter__() + await it.__anext__() + self._exits.append(it) + + async def _on_cleanup(self, app: Application) -> None: + errors = [] + for it in reversed(self._exits): + try: + await it.__anext__() + except StopAsyncIteration: + pass + except Exception as exc: + errors.append(exc) + else: + errors.append(RuntimeError("{!r} has more than one 'yield'" + .format(it))) + if errors: + if len(errors) == 1: + raise errors[0] + else: + raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_exceptions.py b/venv/lib/python3.7/site-packages/aiohttp/web_exceptions.py new file mode 100644 index 0000000..9768c4f --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_exceptions.py @@ -0,0 +1,411 @@ +import warnings +from typing import Any, Dict, Iterable, List, Optional, Set # noqa + +from .typedefs import LooseHeaders, StrOrURL +from .web_response import Response + +__all__ = ( + 'HTTPException', + 'HTTPError', + 'HTTPRedirection', + 'HTTPSuccessful', + 'HTTPOk', + 'HTTPCreated', + 'HTTPAccepted', + 'HTTPNonAuthoritativeInformation', + 'HTTPNoContent', + 'HTTPResetContent', + 'HTTPPartialContent', + 'HTTPMultipleChoices', + 'HTTPMovedPermanently', + 'HTTPFound', + 'HTTPSeeOther', + 'HTTPNotModified', + 'HTTPUseProxy', + 'HTTPTemporaryRedirect', + 'HTTPPermanentRedirect', + 'HTTPClientError', + 'HTTPBadRequest', + 'HTTPUnauthorized', + 'HTTPPaymentRequired', + 'HTTPForbidden', + 'HTTPNotFound', + 'HTTPMethodNotAllowed', + 'HTTPNotAcceptable', + 'HTTPProxyAuthenticationRequired', + 'HTTPRequestTimeout', + 'HTTPConflict', + 'HTTPGone', + 'HTTPLengthRequired', + 'HTTPPreconditionFailed', + 'HTTPRequestEntityTooLarge', + 'HTTPRequestURITooLong', + 'HTTPUnsupportedMediaType', + 'HTTPRequestRangeNotSatisfiable', + 'HTTPExpectationFailed', + 'HTTPMisdirectedRequest', + 'HTTPUnprocessableEntity', + 'HTTPFailedDependency', + 'HTTPUpgradeRequired', + 'HTTPPreconditionRequired', + 'HTTPTooManyRequests', + 'HTTPRequestHeaderFieldsTooLarge', + 'HTTPUnavailableForLegalReasons', + 'HTTPServerError', + 'HTTPInternalServerError', + 'HTTPNotImplemented', + 'HTTPBadGateway', + 'HTTPServiceUnavailable', + 'HTTPGatewayTimeout', + 'HTTPVersionNotSupported', + 'HTTPVariantAlsoNegotiates', + 'HTTPInsufficientStorage', + 'HTTPNotExtended', + 'HTTPNetworkAuthenticationRequired', +) + + +############################################################ +# HTTP Exceptions +############################################################ + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = -1 + empty_body = False + + __http_exception__ = True + + def __init__(self, *, + headers: Optional[LooseHeaders]=None, + reason: Optional[str]=None, + body: Any=None, + text: Optional[str]=None, + content_type: Optional[str]=None) -> None: + if body is not None: + warnings.warn( + "body argument is deprecated for http web exceptions", + DeprecationWarning) + Response.__init__(self, status=self.status_code, + headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = "{}: {}".format(self.status, self.reason) + + def __bool__(self) -> bool: + 
return True + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class HTTPRedirection(HTTPException): + """Base class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class _HTTPMove(HTTPRedirection): + + def __init__(self, + location: StrOrURL, + *, + headers: Optional[LooseHeaders]=None, + reason: Optional[str]=None, + body: Any=None, + text: Optional[str]=None, + content_type: Optional[str]=None) -> None: + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Location'] = str(location) + self.location = location + + +class HTTPMultipleChoices(_HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(_HTTPMove): + status_code = 301 + + +class HTTPFound(_HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(_HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(_HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(_HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(_HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__(self, + method: str, + allowed_methods: Iterable[str], + *, + headers: Optional[LooseHeaders]=None, + reason: Optional[str]=None, + body: Any=None, + text: Optional[str]=None, + content_type: Optional[str]=None) -> None: + allow = ','.join(sorted(allowed_methods)) + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Allow'] = allow + self.allowed_methods = set(allowed_methods) # type: Set[str] + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 
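Because HTTPException above subclasses both Response and Exception, the status classes being added here can be either raised or returned from a handler. A minimal usage sketch, assuming a plain aiohttp application (the handler, route, and messages are illustrative and not part of this diff):

from aiohttp import web

async def get_item(request: web.Request) -> web.StreamResponse:
    item_id = request.match_info['id']
    if not item_id.isdigit():
        # 4xx classes accept the usual Response keyword arguments (text, headers, ...)
        raise web.HTTPBadRequest(text="item id must be numeric")
    if item_id != "42":
        raise web.HTTPNotFound(text="item {} not found".format(item_id))
    # 3xx helpers take the redirect target as their first argument
    return web.HTTPFound(location="/items/42/details")

app = web.Application()
app.add_routes([web.get("/items/{id}", get_item)])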
+ + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + status_code = 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + def __init__(self, + max_size: float, + actual_size: float, + **kwargs: Any) -> None: + kwargs.setdefault( + 'text', + 'Maximum request body size {} exceeded, ' + 'actual body size {}'.format(max_size, actual_size) + ) + super().__init__(**kwargs) + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUnprocessableEntity(HTTPClientError): + status_code = 422 + + +class HTTPFailedDependency(HTTPClientError): + status_code = 424 + + +class HTTPUpgradeRequired(HTTPClientError): + status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__(self, + link: str, + *, + headers: Optional[LooseHeaders]=None, + reason: Optional[str]=None, + body: Any=None, + text: Optional[str]=None, + content_type: Optional[str]=None) -> None: + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Link'] = '<%s>; rel="blocked-by"' % link + self.link = link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. 
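As the comment above says, the 5xx classes that follow mark server-side failure. A common companion pattern, sketched here under the assumption of a plain aiohttp application (the middleware name is illustrative, not part of this diff), is to let deliberate HTTPException responses propagate unchanged while converting anything unexpected into HTTPInternalServerError:

from aiohttp import web

@web.middleware
async def error_middleware(request: web.Request, handler) -> web.StreamResponse:
    try:
        return await handler(request)
    except web.HTTPException:
        # intentional HTTP errors (404, 409, ...) keep their status and body
        raise
    except Exception:
        # everything else becomes a 500 so clients never see a raw traceback
        raise web.HTTPInternalServerError(text="internal server error")

app = web.Application(middlewares=[error_middleware])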
+ + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPInsufficientStorage(HTTPServerError): + status_code = 507 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py b/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py new file mode 100644 index 0000000..97c4205 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1,344 @@ +import asyncio +import mimetypes +import os +import pathlib +from functools import partial +from typing import ( # noqa + IO, + TYPE_CHECKING, + Any, + Awaitable, + Callable, + List, + Optional, + Union, + cast, +) + +from . import hdrs +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import set_exception, set_result +from .http_writer import StreamWriter +from .log import server_logger +from .typedefs import LooseHeaders +from .web_exceptions import ( + HTTPNotModified, + HTTPOk, + HTTPPartialContent, + HTTPPreconditionFailed, + HTTPRequestRangeNotSatisfiable, +) +from .web_response import StreamResponse + +__all__ = ('FileResponse',) + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest # noqa + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] + + +NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE")) + + +class SendfileStreamWriter(StreamWriter): + + def __init__(self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + fobj: IO[Any], + count: int, + on_chunk_sent: _T_OnChunkSent=None) -> None: + super().__init__(protocol, loop, on_chunk_sent) + self._sendfile_buffer = [] # type: List[bytes] + self._fobj = fobj + self._count = count + self._offset = fobj.tell() + self._in_fd = fobj.fileno() + + def _write(self, chunk: bytes) -> None: + # we overwrite StreamWriter._write, so nothing can be appended to + # _buffer, and nothing is written to the transport directly by the + # parent class + self.output_size += len(chunk) + self._sendfile_buffer.append(chunk) + + def _sendfile_cb(self, fut: 'asyncio.Future[None]', out_fd: int) -> None: + if fut.cancelled(): + return + try: + if self._do_sendfile(out_fd): + set_result(fut, None) + except Exception as exc: + set_exception(fut, exc) + + def _do_sendfile(self, out_fd: int) -> bool: + try: + n = os.sendfile(out_fd, + self._in_fd, + self._offset, + self._count) + if n == 0: # in_fd EOF reached + n = self._count + except (BlockingIOError, InterruptedError): + n = 0 + self.output_size += n + self._offset += n + self._count -= n + assert self._count >= 0 + return self._count == 0 + + def _done_fut(self, out_fd: int, fut: 'asyncio.Future[None]') -> None: + self.loop.remove_writer(out_fd) + + async def sendfile(self) -> None: + assert self.transport is not None + out_socket = self.transport.get_extra_info('socket').dup() + out_socket.setblocking(False) + out_fd = out_socket.fileno() + + loop = self.loop + data = 
b''.join(self._sendfile_buffer) + try: + await loop.sock_sendall(out_socket, data) + if not self._do_sendfile(out_fd): + fut = loop.create_future() + fut.add_done_callback(partial(self._done_fut, out_fd)) + loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd) + await fut + except asyncio.CancelledError: + raise + except Exception: + server_logger.debug('Socket error') + self.transport.close() + finally: + out_socket.close() + + await super().write_eof() + + async def write_eof(self, chunk: bytes=b'') -> None: + pass + + +class FileResponse(StreamResponse): + """A response object can be used to send files.""" + + def __init__(self, path: Union[str, pathlib.Path], + chunk_size: int=256*1024, + status: int=200, + reason: Optional[str]=None, + headers: Optional[LooseHeaders]=None) -> None: + super().__init__(status=status, reason=reason, headers=headers) + + if isinstance(path, str): + path = pathlib.Path(path) + + self._path = path + self._chunk_size = chunk_size + + async def _sendfile_system(self, request: 'BaseRequest', + fobj: IO[Any], + count: int) -> AbstractStreamWriter: + # Write count bytes of fobj to resp using + # the os.sendfile system call. + # + # For details check + # https://github.com/KeepSafe/aiohttp/issues/1177 + # See https://github.com/KeepSafe/aiohttp/issues/958 for details + # + # request should be an aiohttp.web.Request instance. + # fobj should be an open file object. + # count should be an integer > 0. + + transport = request.transport + assert transport is not None + if (transport.get_extra_info("sslcontext") or + transport.get_extra_info("socket") is None or + self.compression): + writer = await self._sendfile_fallback(request, fobj, count) + else: + writer = SendfileStreamWriter( + request.protocol, + request._loop, + fobj, + count + ) + request._payload_writer = writer + + await super().prepare(request) + await writer.sendfile() + + return writer + + async def _sendfile_fallback(self, request: 'BaseRequest', + fobj: IO[Any], + count: int) -> AbstractStreamWriter: + # Mimic the _sendfile_system() method, but without using the + # os.sendfile() system call. This should be used on systems + # that don't support the os.sendfile(). + + # To keep memory usage low,fobj is transferred in chunks + # controlled by the constructor's chunk_size argument. + + writer = await super().prepare(request) + assert writer is not None + + chunk_size = self._chunk_size + loop = asyncio.get_event_loop() + + chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + while chunk: + await writer.write(chunk) + count = count - chunk_size + if count <= 0: + break + chunk = await loop.run_in_executor( + None, fobj.read, min(chunk_size, count) + ) + + await writer.drain() + return writer + + if hasattr(os, "sendfile") and not NOSENDFILE: # pragma: no cover + _sendfile = _sendfile_system + else: # pragma: no cover + _sendfile = _sendfile_fallback + + async def prepare( + self, + request: 'BaseRequest' + ) -> Optional[AbstractStreamWriter]: + filepath = self._path + + gzip = False + if 'gzip' in request.headers.get(hdrs.ACCEPT_ENCODING, ''): + gzip_path = filepath.with_name(filepath.name + '.gz') + + if gzip_path.is_file(): + filepath = gzip_path + gzip = True + + loop = asyncio.get_event_loop() + st = await loop.run_in_executor(None, filepath.stat) + + modsince = request.if_modified_since + if modsince is not None and st.st_mtime <= modsince.timestamp(): + self.set_status(HTTPNotModified.status_code) + self._length_check = False + # Delete any Content-Length headers provided by user. 
HTTP 304 + # should always have empty response body + return await super().prepare(request) + + unmodsince = request.if_unmodified_since + if unmodsince is not None and st.st_mtime > unmodsince.timestamp(): + self.set_status(HTTPPreconditionFailed.status_code) + return await super().prepare(request) + + if hdrs.CONTENT_TYPE not in self.headers: + ct, encoding = mimetypes.guess_type(str(filepath)) + if not ct: + ct = 'application/octet-stream' + should_set_ct = True + else: + encoding = 'gzip' if gzip else None + should_set_ct = False + + status = HTTPOk.status_code + file_size = st.st_size + count = file_size + + start = None + + ifrange = request.if_range + if ifrange is None or st.st_mtime <= ifrange.timestamp(): + # If-Range header check: + # condition = cached date >= last modification date + # return 206 if True else 200. + # if False: + # Range header would not be processed, return 200 + # if True but Range header missing + # return 200 + try: + rng = request.http_range + start = rng.start + end = rng.stop + except ValueError: + # https://tools.ietf.org/html/rfc7233: + # A server generating a 416 (Range Not Satisfiable) response to + # a byte-range request SHOULD send a Content-Range header field + # with an unsatisfied-range value. + # The complete-length in a 416 response indicates the current + # length of the selected representation. + # + # Will do the same below. Many servers ignore this and do not + # send a Content-Range header with HTTP 416 + self.headers[hdrs.CONTENT_RANGE] = 'bytes */{0}'.format( + file_size) + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + # If a range request has been made, convert start, end slice + # notation into file pointer offset and count + if start is not None or end is not None: + if start < 0 and end is None: # return tail of file + start += file_size + if start < 0: + # if Range:bytes=-1000 in request header but file size + # is only 200, there would be trouble without this + start = 0 + count = file_size - start + else: + # rfc7233:If the last-byte-pos value is + # absent, or if the value is greater than or equal to + # the current length of the representation data, + # the byte range is interpreted as the remainder + # of the representation (i.e., the server replaces the + # value of last-byte-pos with a value that is one less than + # the current length of the selected representation). + count = min(end if end is not None else file_size, + file_size) - start + + if start >= file_size: + # HTTP 416 should be returned in this case. + # + # According to https://tools.ietf.org/html/rfc7233: + # If a valid byte-range-set includes at least one + # byte-range-spec with a first-byte-pos that is less than + # the current length of the representation, or at least one + # suffix-byte-range-spec with a non-zero suffix-length, + # then the byte-range-set is satisfiable. Otherwise, the + # byte-range-set is unsatisfiable. + self.headers[hdrs.CONTENT_RANGE] = 'bytes */{0}'.format( + file_size) + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + status = HTTPPartialContent.status_code + # Even though you are sending the whole file, you should still + # return a HTTP 206 for a Range request. 
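# Illustration (assumed example, not part of the upstream file): for
# "Range: bytes=500-999" on a 10000-byte file the branch above leaves
# start=500 and count=500, so the code below emits
# "Content-Range: bytes 500-999/10000" with status 206 Partial Content;
# a suffix request "Range: bytes=-1000" resolves to start=9000, count=1000.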
+ + self.set_status(status) + if should_set_ct: + self.content_type = ct # type: ignore + if encoding: + self.headers[hdrs.CONTENT_ENCODING] = encoding + if gzip: + self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + self.last_modified = st.st_mtime # type: ignore + self.content_length = count + + self.headers[hdrs.ACCEPT_RANGES] = 'bytes' + + real_start = cast(int, start) + + if status == HTTPPartialContent.status_code: + self.headers[hdrs.CONTENT_RANGE] = 'bytes {0}-{1}/{2}'.format( + real_start, real_start + count - 1, file_size) + + with (await loop.run_in_executor(None, filepath.open, 'rb')) as fobj: + if start: # be aware that start could be None or int=0 here. + await loop.run_in_executor(None, fobj.seek, start) + + return await self._sendfile(request, fobj, count) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_log.py b/venv/lib/python3.7/site-packages/aiohttp/web_log.py new file mode 100644 index 0000000..a1a4576 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_log.py @@ -0,0 +1,235 @@ +import datetime +import functools +import logging +import os +import re +from collections import namedtuple +from typing import Callable, Dict, Iterable, List, Tuple # noqa + +from .abc import AbstractAccessLogger +from .web_request import BaseRequest +from .web_response import StreamResponse + +KeyMethod = namedtuple('KeyMethod', 'key method') + + +class AccessLogger(AbstractAccessLogger): + """Helper object to log access. + + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(request, response, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, including HTTP headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + LOG_FORMAT_MAP = { + 'a': 'remote_address', + 't': 'request_start_time', + 'P': 'process_id', + 'r': 'first_request_line', + 's': 'response_status', + 'b': 'response_size', + 'T': 'request_time', + 'Tf': 'request_time_frac', + 'D': 'request_time_micro', + 'i': 'request_header', + 'o': 'response_header', + } + + LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r'%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)') + CLEANUP_RE = re.compile(r'(%[^s])') + _FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]] + + def __init__(self, logger: logging.Logger, + log_format: str=LOG_FORMAT) -> None: + """Initialise the logger. + + logger is a logger object to be used for logging. + log_format is a string with apache compatible log format description. 
+ + """ + super().__init__(logger, log_format=log_format) + + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in appropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. + + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + # list of (key, method) tuples, we don't use an OrderedDict as users + # can repeat the same key more than once + methods = list() + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == '': + format_key1 = self.LOG_FORMAT_MAP[atom[0]] + m = getattr(AccessLogger, '_format_%s' % atom[0]) + key_method = KeyMethod(format_key1, m) + else: + format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) + m = getattr(AccessLogger, '_format_%s' % atom[2]) + key_method = KeyMethod(format_key2, + functools.partial(m, atom[1])) + + methods.append(key_method) + + log_format = self.FORMAT_RE.sub(r'%s', log_format) + log_format = self.CLEANUP_RE.sub(r'%\1', log_format) + return log_format, methods + + @staticmethod + def _format_i(key: str, + request: BaseRequest, + response: StreamResponse, + time: float) -> str: + if request is None: + return '(no headers)' + + # suboptimal, make istr(key) once + return request.headers.get(key, '-') + + @staticmethod + def _format_o(key: str, + request: BaseRequest, + response: StreamResponse, + time: float) -> str: + # suboptimal, make istr(key) once + return response.headers.get(key, '-') + + @staticmethod + def _format_a(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + if request is None: + return '-' + ip = request.remote + return ip if ip is not None else '-' + + @staticmethod + def _format_t(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + now = datetime.datetime.utcnow() + start_time = now - datetime.timedelta(seconds=time) + return start_time.strftime('[%d/%b/%Y:%H:%M:%S +0000]') + + @staticmethod + def _format_P(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + if request is None: + return '-' + return '%s %s HTTP/%s.%s' % (request.method, request.path_qs, + request.version.major, + request.version.minor) + + @staticmethod + def _format_s(request: BaseRequest, + response: StreamResponse, + time: float) -> int: + return response.status + + @staticmethod + def _format_b(request: BaseRequest, + response: StreamResponse, + time: float) -> int: + return response.body_length + + @staticmethod + def _format_T(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + return str(round(time)) + + @staticmethod + def _format_Tf(request: BaseRequest, + response: StreamResponse, + time: float) -> 
str: + return '%06f' % time + + @staticmethod + def _format_D(request: BaseRequest, + response: StreamResponse, + time: float) -> str: + return str(round(time * 1000000)) + + def _format_line(self, + request: BaseRequest, + response: StreamResponse, + time: float) -> Iterable[Tuple[str, + Callable[[BaseRequest, + StreamResponse, + float], + str]]]: + return [(key, method(request, response, time)) + for key, method in self._methods] + + def log(self, + request: BaseRequest, + response: StreamResponse, + time: float) -> None: + try: + fmt_info = self._format_line(request, response, time) + + values = list() + extra = dict() + for key, value in fmt_info: + values.append(value) + + if key.__class__ is str: + extra[key] = value + else: + k1, k2 = key + dct = extra.get(k1, {}) + dct[k2] = value # type: ignore + extra[k1] = dct # type: ignore + + self.logger.info(self._log_format % tuple(values), extra=extra) + except Exception: + self.logger.exception("Error in logging") diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_middlewares.py b/venv/lib/python3.7/site-packages/aiohttp/web_middlewares.py new file mode 100644 index 0000000..7c2e7a4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1,120 @@ +import re +from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar + +from .web_exceptions import HTTPMovedPermanently, _HTTPMove +from .web_request import Request +from .web_response import StreamResponse +from .web_urldispatcher import SystemRoute + +__all__ = ( + 'middleware', + 'normalize_path_middleware', +) + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application # noqa + +_Func = TypeVar('_Func') + + +async def _check_request_resolves(request: Request, + path: str) -> Tuple[bool, Request]: + alt_request = request.clone(rel_url=path) + + match_info = await request.app.router.resolve(alt_request) + alt_request._match_info = match_info # type: ignore + + if match_info.http_exception is None: + return True, alt_request + + return False, request + + +def middleware(f: _Func) -> _Func: + f.__middleware_version__ = 1 # type: ignore + return f + + +_Handler = Callable[[Request], Awaitable[StreamResponse]] +_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]] + + +def normalize_path_middleware( + *, append_slash: bool=True, remove_slash: bool=False, + merge_slashes: bool=True, + redirect_class: Type[_HTTPMove]=HTTPMovedPermanently) -> _Middleware: + """ + Middleware factory which produces a middleware that normalizes + the path of a request. By normalizing it means: + + - Add or remove a trailing slash to the path. + - Double slashes are replaced by one. + + The middleware returns as soon as it finds a path that resolves + correctly. The order if both merge and append/remove are enabled is + 1) merge slashes + 2) append/remove slash + 3) both merge slashes and append/remove slash. + If the path resolves with at least one of those conditions, it will + redirect to the new path. + + Only one of `append_slash` and `remove_slash` can be enabled. If both + are `True` the factory will raise an assertion error + + If `append_slash` is `True` the middleware will append a slash when + needed. If a resource is defined with trailing slash and the request + comes without it, it will append it automatically. + + If `remove_slash` is `True`, `append_slash` must be `False`. 
When enabled + the middleware will remove trailing slashes and redirect if the resource + is defined + + If merge_slashes is True, merge multiple consecutive slashes in the + path into one. + """ + + correct_configuration = not (append_slash and remove_slash) + assert correct_configuration, "Cannot both remove and append slash" + + @middleware + async def impl(request: Request, handler: _Handler) -> StreamResponse: + if isinstance(request.match_info.route, SystemRoute): + paths_to_check = [] + if '?' in request.raw_path: + path, query = request.raw_path.split('?', 1) + query = '?' + query + else: + query = '' + path = request.raw_path + + if merge_slashes: + paths_to_check.append(re.sub('//+', '/', path)) + if append_slash and not request.path.endswith('/'): + paths_to_check.append(path + '/') + if remove_slash and request.path.endswith('/'): + paths_to_check.append(path[:-1]) + if merge_slashes and append_slash: + paths_to_check.append( + re.sub('//+', '/', path + '/')) + if merge_slashes and remove_slash: + merged_slashes = re.sub('//+', '/', path) + paths_to_check.append(merged_slashes[:-1]) + + for path in paths_to_check: + resolves, request = await _check_request_resolves( + request, path) + if resolves: + raise redirect_class(request.raw_path + query) + + return await handler(request) + + return impl + + +def _fix_request_current_app(app: 'Application') -> _Middleware: + + @middleware + async def impl(request: Request, handler: _Handler) -> StreamResponse: + with request.match_info.set_current_app(app): + return await handler(request) + return impl diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py b/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py new file mode 100644 index 0000000..c736a78 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,582 @@ +import asyncio +import asyncio.streams +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Optional, + Type, + cast, +) + +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import CeilTimeout, current_task +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ('RequestHandler', 'RequestPayloadError', 'PayloadAccessError') + +if TYPE_CHECKING: # pragma: no cover + from .web_server import Server # noqa + + +_RequestFactory = Callable[[RawRequestMessage, + StreamReader, + 'RequestHandler', + AbstractStreamWriter, + 'asyncio.Task[None]'], + BaseRequest] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + + +ERROR = RawRequestMessage( + 'UNKNOWN', '/', HttpVersion10, {}, + {}, True, False, False, False, yarl.URL('/')) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +class RequestHandler(BaseProtocol): + """HTTP protocol implementation. 
+ + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. + + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + :param keepalive_timeout: number of seconds before closing + keep-alive connection + :type keepalive_timeout: int or None + + :param bool tcp_keepalive: TCP keep-alive is on, default is on + + :param bool debug: enable debug mode + + :param logger: custom logger object + :type logger: aiohttp.log.server_logger + + :param access_log_class: custom class for access_logger + :type access_log_class: aiohttp.abc.AbstractAccessLogger + + :param access_log: custom logging object + :type access_log: aiohttp.log.server_logger + + :param str access_log_format: access log format string + + :param loop: Optional event loop + + :param int max_line_size: Optional maximum header line size + + :param int max_field_size: Optional maximum header field size + + :param int max_headers: Optional maximum header size + + """ + KEEPALIVE_RESCHEDULE_DELAY = 1 + + __slots__ = ('_request_count', '_keep_alive', '_manager', + '_request_handler', '_request_factory', '_tcp_keepalive', + '_keepalive_time', '_keepalive_handle', '_keepalive_timeout', + '_lingering_time', '_messages', '_message_tail', + '_waiter', '_error_handler', '_task_handler', + '_upgrade', '_payload_parser', '_request_parser', + '_reading_paused', 'logger', 'debug', 'access_log', + 'access_logger', '_close', '_force_close') + + def __init__(self, manager: 'Server', *, + loop: asyncio.AbstractEventLoop, + keepalive_timeout: float=75., # NGINX default is 75 secs + tcp_keepalive: bool=True, + logger: Logger=server_logger, + access_log_class: Type[AbstractAccessLogger]=AccessLogger, + access_log: Logger=access_logger, + access_log_format: str=AccessLogger.LOG_FORMAT, + debug: bool=False, + max_line_size: int=8190, + max_headers: int=32768, + max_field_size: int=8190, + lingering_time: float=10.0): + + super().__init__(loop) + + self._request_count = 0 + self._keepalive = False + self._manager = manager # type: Optional[Server] + self._request_handler = manager.request_handler # type: Optional[_RequestHandler] # noqa + self._request_factory = manager.request_factory # type: Optional[_RequestFactory] # noqa + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._keepalive_time = 0.0 + self._keepalive_handle = None # type: Optional[asyncio.Handle] + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages = deque() # type: Any # Python 3.5 has no typing.Deque + self._message_tail = b'' + + self._waiter = None # type: Optional[asyncio.Future[None]] + self._error_handler = None # type: Optional[asyncio.Task[None]] + self._task_handler = None # type: Optional[asyncio.Task[None]] + + self._upgrade = False + self._payload_parser = None # type: Any + self._request_parser = HttpRequestParser( + self, loop, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError) # type: Optional[HttpRequestParser] # noqa + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger = access_log_class( + access_log, access_log_format) # type: Optional[AbstractAccessLogger] # noqa + else: + 
self.access_logger = None + + self._close = False + self._force_close = False + + def __repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + 'connected' if self.transport is not None else 'disconnected') + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float]=15.0) -> None: + """Worker process is about to exit, we need cleanup everything and + stop accepting requests. It is especially important for keep-alive + connections.""" + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._waiter: + self._waiter.cancel() + + # wait for handlers + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + with CeilTimeout(timeout, loop=self._loop): + if (self._error_handler is not None and + not self._error_handler.done()): + await self._error_handler + + if (self._task_handler is not None and + not self._task_handler.done()): + await self._task_handler + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + if self.transport is not None: + self.transport.close() + self.transport = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + self._task_handler = self._loop.create_task(self.start()) + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + self._manager = None + self._force_close = True + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._task_handler is not None: + self._task_handler.cancel() + + if self._error_handler is not None: + self._error_handler.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b'' + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + # something happened during parsing + self._error_handler = self._loop.create_task( + self.handle_parse_error( + StreamWriter(self, self._loop), + 400, exc, exc.message)) + self.close() + except Exception as exc: + # 500: internal error + self._error_handler = self._loop.create_task( + self.handle_parse_error( + StreamWriter(self, self._loop), + 500, exc)) + self.close() + else: + if messages: + # sometimes the parser returns no messages + for (msg, payload) in messages: + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if waiter is not None: + if not waiter.done(): + # don't set result twice + 
waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and tail: + self._message_tail = tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif data: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Stop accepting new pipelinig messages and close + connection when handlers done processing messages""" + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Force close connection""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access(self, + request: BaseRequest, + response: StreamResponse, + time: float) -> None: + if self.access_logger is not None: + self.access_logger.log(request, response, time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + if self._force_close or not self._keepalive: + return + + next = self._keepalive_time + self._keepalive_timeout + + # handler in idle state + if self._waiter: + if self._loop.time() > next: + self.force_close() + return + + # not all request handlers are done, + # reschedule itself to next second + self._keepalive_handle = self._loop.call_later( + self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive) + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
+ """ + loop = self._loop + handler = self._task_handler + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + except asyncio.CancelledError: + break + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + if self.access_log: + now = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + request = self._request_factory( + message, payload, self, writer, handler) + try: + try: + # a new task is used for copy context vars (#3406) + task = self._loop.create_task( + self._request_handler(request)) + resp = await task + except HTTPException as exc: + resp = exc + except asyncio.CancelledError: + self.log_debug('Ignored premature client disconnection') + break + except asyncio.TimeoutError as exc: + self.log_debug('Request handler timed out.', exc_info=exc) + resp = self.handle_error(request, 504) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + else: + # Deprecation warning (See #2415) + if getattr(resp, '__http_exception__', False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning) + + if self.debug: + if not isinstance(resp, StreamResponse): + if resp is None: + raise RuntimeError("Missing return " + "statement on request handler") + else: + raise RuntimeError("Web-handler should return " + "a response instance, " + "got {!r}".format(resp)) + await resp.prepare(request) + await resp.write_eof() + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # log access + if self.access_log: + self.log_access(request, resp, loop.time() - now) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + 'Start lingering close timer for %s sec.', + lingering_time) + + now = loop.time() + end_t = now + lingering_time + + with suppress( + asyncio.TimeoutError, asyncio.CancelledError): + while not payload.is_eof() and now < end_t: + with CeilTimeout(end_t - now, loop=loop): + # read and ignore + await payload.readany() + now = loop.time() + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug('Uncompleted request.') + self.close() + + payload.set_exception(PayloadAccessError()) + + except asyncio.CancelledError: + self.log_debug('Ignored premature client disconnection ') + break + except RuntimeError as exc: + if self.debug: + self.log_exception( + 'Unhandled runtime exception', exc_info=exc) + self.force_close() + except Exception as exc: + self.log_exception('Unhandled exception', exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug('Ignored premature client disconnection.') + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = self._loop.time() + self._keepalive_time = now + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + now + keepalive_timeout, + self._process_keepalive) + else: + break + + # remove handler, close transport if no handlers left + 
if not self._force_close: + self._task_handler = None + if self.transport is not None and self._error_handler is None: + self.transport.close() + + def handle_error(self, + request: BaseRequest, + status: int=500, + exc: Optional[BaseException]=None, + message: Optional[str]=None) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection.""" + self.log_exception("Error handling request", exc_info=exc) + + ct = 'text/plain' + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = '{0.value} {0.phrase}'.format( + HTTPStatus.INTERNAL_SERVER_ERROR + ) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if 'text/html' in request.headers.get('Accept', ''): + if tb: + tb = html_escape(tb) + msg = '
<h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb) + message = ( + "<html><head>" + "<title>{title}</title>" + "</head><body>\n<h1>{title}</h1>
" + "\n{msg}\n\n" + ).format(title=title, msg=msg) + ct = 'text/html' + else: + if tb: + msg = tb + message = title + '\n\n' + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + # some data already got sent, connection is broken + if request.writer.output_size > 0 or self.transport is None: + self.force_close() + + return resp + + async def handle_parse_error(self, + writer: AbstractStreamWriter, + status: int, + exc: Optional[BaseException]=None, + message: Optional[str]=None) -> None: + request = BaseRequest( # type: ignore + ERROR, + EMPTY_PAYLOAD, + self, writer, + current_task(), + self._loop) + + resp = self.handle_error(request, status, exc, message) + await resp.prepare(request) + await resp.write_eof() + + if self.transport is not None: + self.transport.close() + + self._error_handler = None diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_request.py b/venv/lib/python3.7/site-packages/aiohttp/web_request.py new file mode 100644 index 0000000..0534f7e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_request.py @@ -0,0 +1,743 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from email.utils import parsedate +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel +from .http_parser import RawRequestMessage +from .multipart import MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ('BaseRequest', 'FileField', 'Request') + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application # noqa + from .web_urldispatcher import UrlMappingMatchInfo # noqa + from .web_protocol import RequestHandler # noqa + + +@attr.s(frozen=True, slots=True) +class FileField: + name = attr.ib(type=str) + filename = attr.ib(type=str) + file = attr.ib(type=io.BufferedReader) + content_type = attr.ib(type=str) + headers = attr.ib(type=CIMultiDictProxy) # type: CIMultiDictProxy[str] + + +_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN = r'[{tchar}]+'.format(tchar=_TCHAR) + +_QDTEXT = r'[{}]'.format( + r''.join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR = r'\\[\t !-~]' + +_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR) + +_FORWARDED_PAIR = ( + r'({token})=({token}|{quoted_string})(:\d{{1,4}})?'.format( + token=_TOKEN, + quoted_string=_QUOTED_STRING)) + +_QUOTED_PAIR_REPLACE_RE = re.compile(r'\\([\t !-~])') +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR) + 
+############################################################ +# HTTP Request +############################################################ + + +class BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT, + hdrs.METH_TRACE, hdrs.METH_DELETE} + + ATTRS = HeadersMixin.ATTRS | frozenset([ + '_message', '_protocol', '_payload_writer', '_payload', '_headers', + '_method', '_version', '_rel_url', '_post', '_read_bytes', + '_state', '_cache', '_task', '_client_max_size', '_loop', + '_transport_sslcontext', '_transport_peername']) + + def __init__(self, message: RawRequestMessage, + payload: StreamReader, protocol: 'RequestHandler', + payload_writer: AbstractStreamWriter, + task: 'asyncio.Task[None]', + loop: asyncio.AbstractEventLoop, + *, client_max_size: int=1024**2, + state: Optional[Dict[str, Any]]=None, + scheme: Optional[str]=None, + host: Optional[str]=None, + remote: Optional[str]=None) -> None: + if state is None: + state = {} + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers = message.headers + self._method = message.method + self._version = message.version + self._rel_url = message.url + self._post = None # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]] # noqa + self._read_bytes = None # type: Optional[bytes] + + self._state = state + self._cache = {} # type: Dict[str, Any] + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = self._protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info('sslcontext') + self._transport_peername = transport.get_extra_info('peername') + + if scheme is not None: + self._cache['scheme'] = scheme + if host is not None: + self._cache['host'] = host + if remote is not None: + self._cache['remote'] = remote + + def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel, + headers: LooseHeaders=sentinel, scheme: str=sentinel, + host: str=sentinel, + remote: str=sentinel) -> 'BaseRequest': + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
+ + """ + + if self._read_bytes: + raise RuntimeError("Cannot clone request " + "after reading its content") + + dct = {} # type: Dict[str, Any] + if method is not sentinel: + dct['method'] = method + if rel_url is not sentinel: + new_url = URL(rel_url) + dct['url'] = new_url + dct['path'] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct['headers'] = CIMultiDictProxy(CIMultiDict(headers)) + dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8')) + for k, v in headers.items()) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs['scheme'] = scheme + if host is not sentinel: + kwargs['host'] = host + if remote is not sentinel: + kwargs['remote'] = remote + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=self._client_max_size, + state=self._state.copy(), + **kwargs) + + @property + def task(self) -> 'asyncio.Task[None]': + return self._task + + @property + def protocol(self) -> 'RequestHandler': + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", + DeprecationWarning, + stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn("request.loop property is deprecated", + DeprecationWarning, + stacklevel=2) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == 'https' + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem = {} # type: Dict[str, str] + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(',', pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1', + value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ',': # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ';': # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in ' \t': + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(',', pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return 'https' + else: + return 'http' + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> Tuple[int, int]: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + else: + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if isinstance(self._transport_peername, (list, tuple)): + return self._transport_peername[0] + else: + return self._transport_peername + + @reify + def url(self) -> URL: + url = URL.build(scheme=self.scheme, host=self.host) + return url.join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """ The URL including raw *PATH INFO* without the host or scheme. 
+ Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> 'MultiDictProxy[str]': + """A multidict with all the variables in the query string.""" + return self._rel_url.query + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> 'CIMultiDictProxy[str]': + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @staticmethod + def _http_date(_date_str: str) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object + """ + if _date_str is not None: + timetuple = parsedate(_date_str) + if timetuple is not None: + return datetime.datetime(*timetuple[:6], + tzinfo=datetime.timezone.utc) + return None + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return self._http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, '') + parsed = SimpleCookie(raw) + return MappingProxyType( + {key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r'^bytes=(\d*)-(\d*)$' + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError('start cannot be after end') + + if start is end is None: # No valid range supplied + raise ValueError('No start or end of range specified') + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", + DeprecationWarning, stacklevel=2) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, + actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or 'utf-8' + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]': + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if (content_type not in ('', + 'application/x-www-form-urlencoded', + 'multipart/form-data')): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] + + if content_type == 'multipart/form-data': + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + content_type = field.headers.get(hdrs.CONTENT_TYPE) + + if field.filename: + # store file in temp file + tmp = tempfile.TemporaryFile() + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + tmp.write(chunk) + size += len(chunk) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, + actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + tmp.seek(0) + + ff = FileField(field.name, field.filename, + cast(io.BufferedReader, tmp), + content_type, field.headers) + out.add(field.name, ff) + else: + value = await field.read(decode=True) + if content_type is None or \ + content_type.startswith('text/'): + charset = field.get_charset(default='utf-8') + value = value.decode(charset) + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, + actual_size=size + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or 'utf-8' + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset)) + + self._post = MultiDictProxy(out) + return self._post + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode('ascii', 'backslashreplace') \ + .decode('ascii') + return "<{} {} {} >".format(self.__class__.__name__, + self._method, ascii_encodable_path) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(['_match_info']) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # matchdict, route_name, handler + # or information about traversal lookup + + # initialized after route resolving + self._match_info = None # type: 
Optional[UrlMappingMatchInfo] + + if DEBUG: + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn("Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, + name), + DeprecationWarning, + stacklevel=2) + super().__setattr__(name, val) + + def clone(self, *, method: str=sentinel, rel_url: + StrOrURL=sentinel, headers: LooseHeaders=sentinel, + scheme: str=sentinel, host: str=sentinel, remote: + str=sentinel) -> 'Request': + ret = super().clone(method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> 'UrlMappingMatchInfo': + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> 'Application': + """Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[:idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + await app.on_response_prepare.send(self, response) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_response.py b/venv/lib/python3.7/site-packages/aiohttp/web_response.py new file mode 100644 index 0000000..750e040 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_response.py @@ -0,0 +1,717 @@ +import asyncio # noqa +import collections.abc # noqa +import datetime +import enum +import json +import math +import time +import warnings +import zlib +from concurrent.futures import Executor +from email.utils import parsedate +from http.cookies import SimpleCookie +from typing import ( # noqa + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Tuple, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . import hdrs, payload +from .abc import AbstractStreamWriter +from .helpers import HeadersMixin, rfc822_formatted_time, sentinel +from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +__all__ = ('ContentCoding', 'StreamResponse', 'Response', 'json_response') + + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest # noqa + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +class ContentCoding(enum.Enum): + # The content codings that we have support for. 
+ # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = 'deflate' + gzip = 'gzip' + identity = 'identity' + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _length_check = True + + def __init__(self, *, + status: int=200, + reason: Optional[str]=None, + headers: Optional[LooseHeaders]=None) -> None: + self._body = None + self._keep_alive = None # type: Optional[bool] + self._chunked = False + self._compression = False + self._compression_force = None # type: Optional[ContentCoding] + self._cookies = SimpleCookie() + + self._req = None # type: Optional[BaseRequest] + self._payload_writer = None # type: Optional[AbstractStreamWriter] + self._eof_sent = False + self._body_length = 0 + self._state = {} # type: Dict[str, Any] + + if headers is not None: + self._headers = CIMultiDict(headers) # type: CIMultiDict[str] + else: + self._headers = CIMultiDict() # type: CIMultiDict[str] + + self.set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._payload_writer is not None + + @property + def task(self) -> 'asyncio.Task[None]': + return getattr(self._req, 'task', None) + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status(self, status: int, + reason: Optional[str]=None, + _RESPONSES: Mapping[int, + Tuple[str, str]]=RESPONSES) -> None: + assert not self.prepared, \ + 'Cannot change the response status code after ' \ + 'the headers have been sent' + self._status = int(status) + if reason is None: + try: + reason = _RESPONSES[self._status][0] + except Exception: + reason = '' + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn('output_length is deprecated', DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None: + """Enables automatic chunked transfer encoding.""" + self._chunked = True + + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError("You can't enable chunked encoding when " + "a content length is set") + if chunk_size is not None: + warnings.warn('Chunk size is deprecated #1615', DeprecationWarning) + + def enable_compression(self, + force: Optional[Union[bool, ContentCoding]]=None + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. 
+ if type(force) == bool: + force = ContentCoding.deflate if force else ContentCoding.identity + warnings.warn("Using boolean for force is deprecated #3318", + DeprecationWarning) + elif force is not None: + assert isinstance(force, ContentCoding), ("force should one of " + "None, bool or " + "ContentEncoding") + + self._compression = True + self._compression_force = force + + @property + def headers(self) -> 'CIMultiDict[str]': + return self._headers + + @property + def cookies(self) -> SimpleCookie: + return self._cookies + + def set_cookie(self, name: str, value: str, *, + expires: Optional[str]=None, + domain: Optional[str]=None, + max_age: Optional[Union[int, str]]=None, + path: str='/', + secure: Optional[str]=None, + httponly: Optional[str]=None, + version: Optional[str]=None) -> None: + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. + """ + + old = self._cookies.get(name) + if old is not None and old.coded_value == '': + # deleted cookie + self._cookies.pop(name, None) + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c['expires'] = expires + elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': + del c['expires'] + + if domain is not None: + c['domain'] = domain + + if max_age is not None: + c['max-age'] = str(max_age) + elif 'max-age' in c: + del c['max-age'] + + c['path'] = path + + if secure is not None: + c['secure'] = secure + if httponly is not None: + c['httponly'] = httponly + if version is not None: + c['version'] = version + + def del_cookie(self, name: str, *, + domain: Optional[str]=None, + path: str='/') -> None: + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? + self._cookies.pop(name, None) + self.set_cookie(name, '', max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, path=path) + + @property + def content_length(self) -> Optional[int]: + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + if value is not None: + value = int(value) + if self._chunked: + raise RuntimeError("You can't set content length when " + "chunked encoding is enable") + self._headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self._headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self) -> str: + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value: str) -> None: + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self) -> Optional[str]: + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value: Optional[str]) -> None: + ctype = self.content_type # read header values if needed + if ctype == 'application/octet-stream': + raise RuntimeError("Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first") + assert self._content_dict is not None + if value is None: + self._content_dict.pop('charset', None) + else: + self._content_dict['charset'] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self) -> Optional[datetime.datetime]: + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. 
+ """ + httpdate = self._headers.get(hdrs.LAST_MODIFIED) + if httpdate is not None: + timetuple = parsedate(httpdate) + if timetuple is not None: + return datetime.datetime(*timetuple[:6], + tzinfo=datetime.timezone.utc) + return None + + @last_modified.setter + def last_modified(self, + value: Optional[ + Union[int, float, datetime.datetime, str]]) -> None: + if value is None: + self._headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))) + elif isinstance(value, datetime.datetime): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()) + elif isinstance(value, str): + self._headers[hdrs.LAST_MODIFIED] = value + + def _generate_content_type_header( + self, + CONTENT_TYPE: istr=hdrs.CONTENT_TYPE) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = '; '.join("{}={}".format(k, v) + for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + '; ' + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding != ContentCoding.identity: + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: 'BaseRequest') -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + else: + accept_encoding = request.headers.get( + hdrs.ACCEPT_ENCODING, '').lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare( + self, + request: 'BaseRequest' + ) -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + + await request._prepare_hook(self) + return await self._start(request) + + async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter: + self._req = request + + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + writer = self._payload_writer = request._payload_writer + + headers = self._headers + for cookie in self._cookies.values(): + value = cookie.output(header='')[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version)) + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = 'chunked' + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + elif self._length_check: + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = 'chunked' + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + else: + keep_alive = False + + headers.setdefault(hdrs.CONTENT_TYPE, 'application/octet-stream') + headers.setdefault(hdrs.DATE, rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) 
+ + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = 'keep-alive' + else: + if version == HttpVersion11: + headers[hdrs.CONNECTION] = 'close' + + # status line + status_line = 'HTTP/{}.{} {} {}'.format( + version[0], version[1], self._status, self._reason) + await writer.write_headers(status_line, headers) + + return writer + + async def write(self, data: bytes) -> None: + assert isinstance(data, (bytes, bytearray, memoryview)), \ + "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, \ + "Response has not been started" + warnings.warn("drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes=b'') -> None: + assert isinstance(data, (bytes, bytearray, memoryview)), \ + "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, \ + "Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = "{} {} ".format(self._req.method, self._req.path) + else: + info = "not prepared" + return "<{} {} {}>".format(self.__class__.__name__, + self.reason, info) + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + + def __init__(self, *, + body: Any=None, + status: int=200, + reason: Optional[str]=None, + text: Optional[str]=None, + headers: Optional[LooseHeaders]=None, + content_type: Optional[str]=None, + charset: Optional[str]=None, + zlib_executor_size: Optional[int]=None, + zlib_executor: Executor=None) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers = CIMultiDict() # type: CIMultiDict[str] + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type " + "argument") + + if text is not None: + if hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError("passing both Content-Type header and " + "content_type or charset params " + "is forbidden") + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % + type(text)) + if content_type is None: + content_type = 'text/plain' 
+ if charset is None: + charset = 'utf-8' + real_headers[hdrs.CONTENT_TYPE] = ( + content_type + '; charset=' + charset) + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError("passing both Content-Type header and " + "content_type or charset params " + "is forbidden") + else: + if content_type is not None: + if charset is not None: + content_type += '; charset=' + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._compressed_body = None # type: Optional[bytes] + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body(self, body: bytes, + CONTENT_TYPE: istr=hdrs.CONTENT_TYPE, + CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH) -> None: + if body is None: + self._body = None # type: Optional[bytes] + self._body_payload = False # type: bool + elif isinstance(body, (bytes, bytearray)): + self._body = body + self._body_payload = False + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError('Unsupported body type %r' % type(body)) + + self._body_payload = True + + headers = self._headers + + # set content-length header if needed + if not self._chunked and CONTENT_LENGTH not in headers: + size = body.size + if size is not None: + headers[CONTENT_LENGTH] = str(size) + + # set content-type + if CONTENT_TYPE not in headers: + headers[CONTENT_TYPE] = body.content_type + + # copy payload headers + if body.headers: + for (key, value) in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or 'utf-8') + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance(text, str), \ + "text argument must be str (%r)" % type(text) + + if self.content_type == 'application/octet-stream': + self.content_type = 'text/plain' + if self.charset is None: + self.charset = 'utf-8' + + self._body = text.encode(self.charset) + self._body_payload = False + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return super().content_length + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif self._body_payload: + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes=b'') -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body = self._body # type: Optional[Union[bytes, Payload]] + else: + body = self._compressed_body + assert not data, "data arg is not supported, got {!r}".format(data) + assert self._req is not None + assert self._payload_writer is not None + if body is not None: + if (self._req._method == hdrs.METH_HEAD or + self._status in [204, 304]): + await 
super().write_eof() + elif self._body_payload: + payload = cast(Payload, body) + await payload.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + else: + await super().write_eof() + + async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter: + if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + if not self._body_payload: + if self._body is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body)) + else: + self._headers[hdrs.CONTENT_LENGTH] = '0' + + return await super()._start(request) + + def _compress_body(self, zlib_mode: int) -> None: + compressobj = zlib.compressobj(wbits=zlib_mode) + body_in = self._body + assert body_in is not None + self._compressed_body = \ + compressobj.compress(body_in) + compressobj.flush() + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._body_payload or self._chunked: + return await super()._do_start_compression(coding) + + if coding != ContentCoding.identity: + # Instead of using _payload_writer.enable_compression, + # compress the whole body + zlib_mode = (16 + zlib.MAX_WBITS + if coding == ContentCoding.gzip else -zlib.MAX_WBITS) + body_in = self._body + assert body_in is not None + if self._zlib_executor_size is not None and \ + len(body_in) > self._zlib_executor_size: + await asyncio.get_event_loop().run_in_executor( + self._zlib_executor, self._compress_body, zlib_mode) + else: + self._compress_body(zlib_mode) + + body_out = self._compressed_body + assert body_out is not None + + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out)) + + +def json_response(data: Any=sentinel, *, + text: str=None, + body: bytes=None, + status: int=200, + reason: Optional[str]=None, + headers: LooseHeaders=None, + content_type: str='application/json', + dumps: JSONEncoder=json.dumps) -> Response: + if data is not sentinel: + if text or body: + raise ValueError( + "only one of data, text, or body should be specified" + ) + else: + text = dumps(data) + return Response(text=text, body=body, status=status, reason=reason, + headers=headers, content_type=content_type) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_routedef.py b/venv/lib/python3.7/site-packages/aiohttp/web_routedef.py new file mode 100644 index 0000000..ffa8f1b --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_routedef.py @@ -0,0 +1,193 @@ +import abc +import os # noqa +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Union, + overload, +) + +import attr + +from . 
import hdrs +from .abc import AbstractView +from .typedefs import PathLike + +if TYPE_CHECKING: # pragma: no cover + from .web_urldispatcher import UrlDispatcher + from .web_request import Request + from .web_response import StreamResponse +else: + Request = StreamResponse = UrlDispatcher = None + + +__all__ = ('AbstractRouteDef', 'RouteDef', 'StaticDef', 'RouteTableDef', + 'head', 'options', 'get', 'post', 'patch', 'put', 'delete', + 'route', 'view', 'static') + + +class AbstractRouteDef(abc.ABC): + @abc.abstractmethod + def register(self, router: UrlDispatcher) -> None: + pass # pragma: no cover + + +_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]] +_HandlerType = Union[AbstractView, _SimpleHandler] + + +@attr.s(frozen=True, repr=False, slots=True) +class RouteDef(AbstractRouteDef): + method = attr.ib(type=str) + path = attr.ib(type=str) + handler = attr.ib() # type: _HandlerType + kwargs = attr.ib(type=Dict[str, Any]) + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(", {}={!r}".format(name, value)) + return (" {handler.__name__!r}" + "{info}>".format(method=self.method, path=self.path, + handler=self.handler, info=''.join(info))) + + def register(self, router: UrlDispatcher) -> None: + if self.method in hdrs.METH_ALL: + reg = getattr(router, 'add_'+self.method.lower()) + reg(self.path, self.handler, **self.kwargs) + else: + router.add_route(self.method, self.path, self.handler, + **self.kwargs) + + +@attr.s(frozen=True, repr=False, slots=True) +class StaticDef(AbstractRouteDef): + prefix = attr.ib(type=str) + path = attr.ib() # type: PathLike + kwargs = attr.ib(type=Dict[str, Any]) + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(", {}={!r}".format(name, value)) + return (" {path}" + "{info}>".format(prefix=self.prefix, path=self.path, + info=''.join(info))) + + def register(self, router: UrlDispatcher) -> None: + router.add_static(self.prefix, self.path, **self.kwargs) + + +def route(method: str, path: str, handler: _HandlerType, + **kwargs: Any) -> RouteDef: + return RouteDef(method, path, handler, kwargs) + + +def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_HEAD, path, handler, **kwargs) + + +def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + +def get(path: str, handler: _HandlerType, *, name: Optional[str]=None, + allow_head: bool=True, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_GET, path, handler, name=name, + allow_head=allow_head, **kwargs) + + +def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_POST, path, handler, **kwargs) + + +def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PUT, path, handler, **kwargs) + + +def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PATCH, path, handler, **kwargs) + + +def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_DELETE, path, handler, **kwargs) + + +def view(path: str, handler: AbstractView, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_ANY, path, handler, **kwargs) + + +def static(prefix: str, path: PathLike, + **kwargs: Any) -> StaticDef: + return StaticDef(prefix, path, kwargs) + + +_Deco = Callable[[_HandlerType], _HandlerType] + + +class RouteTableDef(Sequence[AbstractRouteDef]): 
+ """Route definition table""" + def __init__(self) -> None: + self._items = [] # type: List[AbstractRouteDef] + + def __repr__(self) -> str: + return "".format(len(self._items)) + + @overload + def __getitem__(self, index: int) -> AbstractRouteDef: ... # noqa + + @overload # noqa + def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... # noqa + + def __getitem__(self, index): # type: ignore # noqa + return self._items[index] + + def __iter__(self) -> Iterator[AbstractRouteDef]: + return iter(self._items) + + def __len__(self) -> int: + return len(self._items) + + def __contains__(self, item: object) -> bool: + return item in self._items + + def route(self, + method: str, + path: str, + **kwargs: Any) -> _Deco: + def inner(handler: _HandlerType) -> _HandlerType: + self._items.append(RouteDef(method, path, handler, kwargs)) + return handler + return inner + + def head(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_HEAD, path, **kwargs) + + def get(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_GET, path, **kwargs) + + def post(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_POST, path, **kwargs) + + def put(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PUT, path, **kwargs) + + def patch(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PATCH, path, **kwargs) + + def delete(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_DELETE, path, **kwargs) + + def view(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_ANY, path, **kwargs) + + def static(self, prefix: str, path: PathLike, + **kwargs: Any) -> None: + self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_runner.py b/venv/lib/python3.7/site-packages/aiohttp/web_runner.py new file mode 100644 index 0000000..391adb5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_runner.py @@ -0,0 +1,308 @@ +import asyncio +import signal +import socket +from abc import ABC, abstractmethod +from typing import Any, List, Optional, Set + +from yarl import URL + +from .web_app import Application +from .web_server import Server + +try: + from ssl import SSLContext +except ImportError: + SSLContext = object # type: ignore + + +__all__ = ('BaseSite', 'TCPSite', 'UnixSite', 'SockSite', 'BaseRunner', + 'AppRunner', 'ServerRunner', 'GracefulExit') + + +class GracefulExit(SystemExit): + code = 1 + + +def _raise_graceful_exit() -> None: + raise GracefulExit() + + +class BaseSite(ABC): + __slots__ = ('_runner', '_shutdown_timeout', '_ssl_context', '_backlog', + '_server') + + def __init__(self, runner: 'BaseRunner', *, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + backlog: int=128) -> None: + if runner.server is None: + raise RuntimeError("Call runner.setup() before making a site") + self._runner = runner + self._shutdown_timeout = shutdown_timeout + self._ssl_context = ssl_context + self._backlog = backlog + self._server = None # type: Optional[asyncio.AbstractServer] + + @property + @abstractmethod + def name(self) -> str: + pass # pragma: no cover + + @abstractmethod + async def start(self) -> None: + self._runner._reg_site(self) + + async def stop(self) -> None: + self._runner._check_site(self) + if self._server is None: + self._runner._unreg_site(self) + return # not started yet + self._server.close() + await self._server.wait_closed() + await self._runner.shutdown() + assert 
self._runner.server + await self._runner.server.shutdown(self._shutdown_timeout) + self._runner._unreg_site(self) + + +class TCPSite(BaseSite): + __slots__ = ('_host', '_port', '_reuse_address', '_reuse_port') + + def __init__(self, runner: 'BaseRunner', + host: str=None, port: int=None, *, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + backlog: int=128, reuse_address: Optional[bool]=None, + reuse_port: Optional[bool]=None) -> None: + super().__init__(runner, shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, backlog=backlog) + if host is None: + host = "0.0.0.0" + self._host = host + if port is None: + port = 8443 if self._ssl_context else 8080 + self._port = port + self._reuse_address = reuse_address + self._reuse_port = reuse_port + + @property + def name(self) -> str: + scheme = 'https' if self._ssl_context else 'http' + return str(URL.build(scheme=scheme, host=self._host, port=self._port)) + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, self._host, self._port, + ssl=self._ssl_context, backlog=self._backlog, + reuse_address=self._reuse_address, + reuse_port=self._reuse_port) + + +class UnixSite(BaseSite): + __slots__ = ('_path', ) + + def __init__(self, runner: 'BaseRunner', path: str, *, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + backlog: int=128) -> None: + super().__init__(runner, shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, backlog=backlog) + self._path = path + + @property + def name(self) -> str: + scheme = 'https' if self._ssl_context else 'http' + return '{}://unix:{}:'.format(scheme, self._path) + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_unix_server( + server, self._path, + ssl=self._ssl_context, backlog=self._backlog) + + +class SockSite(BaseSite): + __slots__ = ('_sock', '_name') + + def __init__(self, runner: 'BaseRunner', sock: socket.socket, *, + shutdown_timeout: float=60.0, + ssl_context: Optional[SSLContext]=None, + backlog: int=128) -> None: + super().__init__(runner, shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, backlog=backlog) + self._sock = sock + scheme = 'https' if self._ssl_context else 'http' + if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX: + name = '{}://unix:{}:'.format(scheme, sock.getsockname()) + else: + host, port = sock.getsockname()[:2] + name = str(URL.build(scheme=scheme, host=host, port=port)) + self._name = name + + @property + def name(self) -> str: + return self._name + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, sock=self._sock, + ssl=self._ssl_context, backlog=self._backlog) + + +class BaseRunner(ABC): + __slots__ = ('_handle_signals', '_kwargs', '_server', '_sites') + + def __init__(self, *, handle_signals: bool=False, **kwargs: Any) -> None: + self._handle_signals = handle_signals + self._kwargs = kwargs + self._server = None # type: Optional[Server] + self._sites = [] # type: List[BaseSite] + + @property + def server(self) -> Optional[Server]: + return self._server + + @property + def addresses(self) -> List[str]: + ret = [] # type: List[str] + for site in self._sites: 
+ server = site._server + if server is not None: + sockets = server.sockets + if sockets is not None: + for sock in sockets: + ret.append(sock.getsockname()) + return ret + + @property + def sites(self) -> Set[BaseSite]: + return set(self._sites) + + async def setup(self) -> None: + loop = asyncio.get_event_loop() + + if self._handle_signals: + try: + loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) + loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) + except NotImplementedError: # pragma: no cover + # add_signal_handler is not implemented on Windows + pass + + self._server = await self._make_server() + + @abstractmethod + async def shutdown(self) -> None: + pass # pragma: no cover + + async def cleanup(self) -> None: + loop = asyncio.get_event_loop() + + if self._server is None: + # no started yet, do nothing + return + + # The loop over sites is intentional, an exception on gather() + # leaves self._sites in unpredictable state. + # The loop guaranties that a site is either deleted on success or + # still present on failure + for site in list(self._sites): + await site.stop() + await self._cleanup_server() + self._server = None + if self._handle_signals: + try: + loop.remove_signal_handler(signal.SIGINT) + loop.remove_signal_handler(signal.SIGTERM) + except NotImplementedError: # pragma: no cover + # remove_signal_handler is not implemented on Windows + pass + + @abstractmethod + async def _make_server(self) -> Server: + pass # pragma: no cover + + @abstractmethod + async def _cleanup_server(self) -> None: + pass # pragma: no cover + + def _reg_site(self, site: BaseSite) -> None: + if site in self._sites: + raise RuntimeError("Site {} is already registered in runner {}" + .format(site, self)) + self._sites.append(site) + + def _check_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError("Site {} is not registered in runner {}" + .format(site, self)) + + def _unreg_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError("Site {} is not registered in runner {}" + .format(site, self)) + self._sites.remove(site) + + +class ServerRunner(BaseRunner): + """Low-level web server runner""" + + __slots__ = ('_web_server',) + + def __init__(self, web_server: Server, *, + handle_signals: bool=False, **kwargs: Any) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + self._web_server = web_server + + async def shutdown(self) -> None: + pass + + async def _make_server(self) -> Server: + return self._web_server + + async def _cleanup_server(self) -> None: + pass + + +class AppRunner(BaseRunner): + """Web Application runner""" + + __slots__ = ('_app',) + + def __init__(self, app: Application, *, + handle_signals: bool=False, **kwargs: Any) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + if not isinstance(app, Application): + raise TypeError("The first argument should be web.Application " + "instance, got {!r}".format(app)) + self._app = app + + @property + def app(self) -> Application: + return self._app + + async def shutdown(self) -> None: + await self._app.shutdown() + + async def _make_server(self) -> Server: + loop = asyncio.get_event_loop() + self._app._set_loop(loop) + self._app.on_startup.freeze() + await self._app.startup() + self._app.freeze() + + return self._app._make_handler(loop=loop, **self._kwargs) + + async def _cleanup_server(self) -> None: + await self._app.cleanup() diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_server.py 
b/venv/lib/python3.7/site-packages/aiohttp/web_server.py new file mode 100644 index 0000000..ad746ed --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_server.py @@ -0,0 +1,57 @@ +"""Low level HTTP server.""" +import asyncio +from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa + +from .abc import AbstractStreamWriter +from .helpers import get_running_loop +from .http_parser import RawRequestMessage +from .streams import StreamReader +from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler +from .web_request import BaseRequest + +__all__ = ('Server',) + + +class Server: + + def __init__(self, + handler: _RequestHandler, + *, + request_factory: Optional[_RequestFactory]=None, + loop: Optional[asyncio.AbstractEventLoop]=None, + **kwargs: Any) -> None: + self._loop = get_running_loop(loop) + self._connections = {} # type: Dict[RequestHandler, asyncio.Transport] + self._kwargs = kwargs + self.requests_count = 0 + self.request_handler = handler + self.request_factory = request_factory or self._make_request + + @property + def connections(self) -> List[RequestHandler]: + return list(self._connections.keys()) + + def connection_made(self, handler: RequestHandler, + transport: asyncio.Transport) -> None: + self._connections[handler] = transport + + def connection_lost(self, handler: RequestHandler, + exc: Optional[BaseException]=None) -> None: + if handler in self._connections: + del self._connections[handler] + + def _make_request(self, message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: 'asyncio.Task[None]') -> BaseRequest: + return BaseRequest( + message, payload, protocol, writer, task, self._loop) + + async def shutdown(self, timeout: Optional[float]=None) -> None: + coros = [conn.shutdown(timeout) for conn in self._connections] + await asyncio.gather(*coros, loop=self._loop) + self._connections.clear() + + def __call__(self) -> RequestHandler: + return RequestHandler(self, loop=self._loop, **self._kwargs) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_urldispatcher.py b/venv/lib/python3.7/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000..182c98e --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1133 @@ +import abc +import asyncio +import base64 +import hashlib +import inspect +import keyword +import os +import re +import warnings +from contextlib import contextmanager +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( # noqa + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Set, + Sized, + Tuple, + Union, + cast, +) + +from yarl import URL + +from . 
import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import PathLike +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ('UrlDispatcher', 'UrlMappingMatchInfo', + 'AbstractResource', 'Resource', 'PlainResource', 'DynamicResource', + 'AbstractRoute', 'ResourceRoute', + 'StaticResource', 'View') + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application # noqa + BaseDict = Dict[str, str] +else: + BaseDict = dict + +HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$") +ROUTE_RE = re.compile(r'(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})') +PATH_SEP = re.escape('/') + + +_WebHandler = Callable[[Request], Awaitable[StreamResponse]] +_ExpectHandler = Callable[[Request], Awaitable[None]] +_Resolve = Tuple[Optional[AbstractMatchInfo], Set[str]] + + +class AbstractResource(Sized, Iterable['AbstractRoute']): + + def __init__(self, *, name: Optional[str]=None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource + + Return (UrlMappingMatchInfo, allowed_methods) pair.""" + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. 
+ + """ + + @abc.abstractmethod + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + + def __init__(self, method: str, + handler: Union[_WebHandler, AbstractView], *, + expect_handler: _ExpectHandler=None, + resource: AbstractResource=None) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction(expect_handler), \ + 'Coroutine is expected, got {!r}'.format(expect_handler) + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError("{} is not allowed HTTP method".format(method)) + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn("Bare generators are deprecated, " + "use @coroutine wrapper", DeprecationWarning) + elif (isinstance(handler, type) and + issubclass(handler, AbstractView)): + pass + else: + warnings.warn("Bare functions are deprecated, " + "use async ones", DeprecationWarning) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) + if asyncio.iscoroutine(result): + result = await result + return result + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> _WebHandler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> None: + await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + super().__init__(match_dict) + self._route = route + self._apps = [] # type: List[Application] + self._current_app = None # type: Optional[Application] + self._frozen = False + + @property + def handler(self) -> _WebHandler: + return self._route.handler + + @property + def route(self) -> AbstractRoute: + return self._route + + @property + def expect_handler(self) -> _ExpectHandler: + return self._route.handle_expect_header + + @property + def http_exception(self) -> Optional[HTTPException]: + return None + + def get_info(self) -> Dict[str, str]: + return self._route.get_info() + + @property + def apps(self) -> Tuple['Application', ...]: + return tuple(self._apps) + + def add_app(self, app: 'Application') -> None: + if self._frozen: + raise RuntimeError("Cannot change apps stack after .freeze() call") + if self._current_app is None: + self._current_app = app + self._apps.insert(0, app) + + @property + def current_app(self) -> 'Application': + app = self._current_app + assert app is not None + return app + + @contextmanager + def set_current_app(self, + app: 
'Application') -> Generator[None, None, None]: + if DEBUG: # pragma: no cover + if app not in self._apps: + raise RuntimeError( + "Expected one of the following apps {!r}, got {!r}" + .format(self._apps, app)) + prev = self._current_app + self._current_app = app + try: + yield + finally: + self._current_app = prev + + def freeze(self) -> None: + self._frozen = True + + def __repr__(self) -> str: + return "".format(super().__repr__(), self._route) + + +class MatchInfoError(UrlMappingMatchInfo): + + def __init__(self, http_exception: HTTPException) -> None: + self._exception = http_exception + super().__init__({}, SystemRoute(self._exception)) + + @property + def http_exception(self) -> HTTPException: + return self._exception + + def __repr__(self) -> str: + return "".format(self._exception.status, + self._exception.reason) + + +async def _default_expect_handler(request: Request) -> None: + """Default handler for Expect header. + + Just send "100 Continue" to client. + raise HTTPExpectationFailed if value of header is not "100-continue" + """ + expect = request.headers.get(hdrs.EXPECT) + if request.version == HttpVersion11: + if expect.lower() == "100-continue": + await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + else: + raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) + + +class Resource(AbstractResource): + + def __init__(self, *, name: Optional[str]=None) -> None: + super().__init__(name=name) + self._routes = [] # type: List[ResourceRoute] + + def add_route(self, method: str, + handler: Union[AbstractView, _WebHandler], *, + expect_handler: Optional[_ExpectHandler]=None + ) -> 'ResourceRoute': + + for route_obj in self._routes: + if route_obj.method == method or route_obj.method == hdrs.METH_ANY: + raise RuntimeError("Added route will never be executed, " + "method {route.method} is already " + "registered".format(route=route_obj)) + + route_obj = ResourceRoute(method, handler, self, + expect_handler=expect_handler) + self.register_route(route_obj) + return route_obj + + def register_route(self, route: 'ResourceRoute') -> None: + assert isinstance(route, ResourceRoute), \ + 'Instance of Route class is required, got {!r}'.format(route) + self._routes.append(route) + + async def resolve(self, request: Request) -> _Resolve: + allowed_methods = set() # type: Set[str] + + match_dict = self._match(request.rel_url.raw_path) + if match_dict is None: + return None, allowed_methods + + for route_obj in self._routes: + route_method = route_obj.method + allowed_methods.add(route_method) + + if (route_method == request.method or + route_method == hdrs.METH_ANY): + return (UrlMappingMatchInfo(match_dict, route_obj), + allowed_methods) + else: + return None, allowed_methods + + @abc.abstractmethod + def _match(self, path: str) -> Optional[Dict[str, str]]: + pass # pragma: no cover + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes) + + # TODO: implement all abstract methods + + +class PlainResource(Resource): + + def __init__(self, path: str, *, name: Optional[str]=None) -> None: + super().__init__(name=name) + assert not path or path.startswith('/') + self._path = path + + @property + def canonical(self) -> str: + return self._path + + def freeze(self) -> None: + if not self._path: + self._path = '/' + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith('/') + assert not prefix.endswith('/') + assert len(prefix) > 1 + self._path = prefix + self._path + + def _match(self, 
path: str) -> Optional[Dict[str, str]]: + # string comparison is about 10 times faster than regexp matching + if self._path == path: + return {} + else: + return None + + def raw_match(self, path: str) -> bool: + return self._path == path + + def get_info(self) -> Dict[str, Any]: + return {'path': self._path} + + def url_for(self) -> URL: # type: ignore + return URL.build(path=self._path, encoded=True) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return "".format(name=name, + path=self._path) + + +class DynamicResource(Resource): + + DYN = re.compile(r'\{(?P[_a-zA-Z][_a-zA-Z0-9]*)\}') + DYN_WITH_RE = re.compile( + r'\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}') + GOOD = r'[^{}/]+' + + def __init__(self, path: str, *, name: Optional[str]=None) -> None: + super().__init__(name=name) + pattern = '' + formatter = '' + for part in ROUTE_RE.split(path): + match = self.DYN.fullmatch(part) + if match: + pattern += '(?P<{}>{})'.format(match.group('var'), self.GOOD) + formatter += '{' + match.group('var') + '}' + continue + + match = self.DYN_WITH_RE.fullmatch(part) + if match: + pattern += '(?P<{var}>{re})'.format(**match.groupdict()) + formatter += '{' + match.group('var') + '}' + continue + + if '{' in part or '}' in part: + raise ValueError("Invalid path '{}'['{}']".format(path, part)) + + path = URL.build(path=part).raw_path + formatter += path + pattern += re.escape(path) + + try: + compiled = re.compile(pattern) + except re.error as exc: + raise ValueError( + "Bad pattern '{}': {}".format(pattern, exc)) from None + assert compiled.pattern.startswith(PATH_SEP) + assert formatter.startswith('/') + self._pattern = compiled + self._formatter = formatter + + @property + def canonical(self) -> str: + return self._formatter + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith('/') + assert not prefix.endswith('/') + assert len(prefix) > 1 + self._pattern = re.compile(re.escape(prefix)+self._pattern.pattern) + self._formatter = prefix + self._formatter + + def _match(self, path: str) -> Optional[Dict[str, str]]: + match = self._pattern.fullmatch(path) + if match is None: + return None + else: + return {key: URL.build(path=value, encoded=True).path + for key, value in match.groupdict().items()} + + def raw_match(self, path: str) -> bool: + return self._formatter == path + + def get_info(self) -> Dict[str, Any]: + return {'formatter': self._formatter, + 'pattern': self._pattern} + + def url_for(self, **parts: str) -> URL: + url = self._formatter.format_map({k: URL.build(path=v).raw_path + for k, v in parts.items()}) + return URL.build(path=url) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return ("" + .format(name=name, formatter=self._formatter)) + + +class PrefixResource(AbstractResource): + + def __init__(self, prefix: str, *, name: Optional[str]=None) -> None: + assert not prefix or prefix.startswith('/'), prefix + assert prefix in ('', '/') or not prefix.endswith('/'), prefix + super().__init__(name=name) + self._prefix = URL.build(path=prefix).raw_path + + @property + def canonical(self) -> str: + return self._prefix + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith('/') + assert not prefix.endswith('/') + assert len(prefix) > 1 + self._prefix = prefix + self._prefix + + def raw_match(self, prefix: str) -> bool: + return False + + # TODO: impl missing abstract methods + + +class StaticResource(PrefixResource): + VERSION_KEY = 'v' + + def __init__(self, 
prefix: str, directory: PathLike, + *, name: Optional[str]=None, + expect_handler: Optional[_ExpectHandler]=None, + chunk_size: int=256 * 1024, + show_index: bool=False, follow_symlinks: bool=False, + append_version: bool=False)-> None: + super().__init__(prefix, name=name) + try: + directory = Path(directory) + if str(directory).startswith('~'): + directory = Path(os.path.expanduser(str(directory))) + directory = directory.resolve() + if not directory.is_dir(): + raise ValueError('Not a directory') + except (FileNotFoundError, ValueError) as error: + raise ValueError( + "No directory exists at '{}'".format(directory)) from error + self._directory = directory + self._show_index = show_index + self._chunk_size = chunk_size + self._follow_symlinks = follow_symlinks + self._expect_handler = expect_handler + self._append_version = append_version + + self._routes = {'GET': ResourceRoute('GET', self._handle, self, + expect_handler=expect_handler), + + 'HEAD': ResourceRoute('HEAD', self._handle, self, + expect_handler=expect_handler)} + + def url_for(self, *, filename: Union[str, Path], # type: ignore + append_version: Optional[bool]=None) -> URL: + if append_version is None: + append_version = self._append_version + if isinstance(filename, Path): + filename = str(filename) + while filename.startswith('/'): + filename = filename[1:] + filename = '/' + filename + + # filename is not encoded + url = URL.build(path=self._prefix + filename) + + if append_version: + try: + if filename.startswith('/'): + filename = filename[1:] + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with open(str(filepath), mode='rb') as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode('ascii') + + def get_info(self) -> Dict[str, Any]: + return {'directory': self._directory, + 'prefix': self._prefix} + + def set_options_route(self, handler: _WebHandler) -> None: + if 'OPTIONS' in self._routes: + raise RuntimeError('OPTIONS route was set already') + self._routes['OPTIONS'] = ResourceRoute( + 'OPTIONS', handler, self, + expect_handler=self._expect_handler) + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.raw_path + method = request.method + allowed_methods = set(self._routes) + if not path.startswith(self._prefix): + return None, set() + + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {'filename': URL.build(path=path[len(self._prefix)+1:], + encoded=True).path} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), + allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = request.match_info['filename'] + try: + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like + # /static/\\machine_name\c$ or 
/static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError) as error: + # relatively safe + raise HTTPNotFound() from error + except HTTPForbidden: + raise + except Exception as error: + # perm error or other kind! + request.app.logger.exception(error) + raise HTTPNotFound() from error + + # on opening a dir, load its contents if allowed + if filepath.is_dir(): + if self._show_index: + try: + return Response(text=self._directory_as_html(filepath), + content_type="text/html") + except PermissionError: + raise HTTPForbidden() + else: + raise HTTPForbidden() + elif filepath.is_file(): + return FileResponse(filepath, chunk_size=self._chunk_size) + else: + raise HTTPNotFound + + def _directory_as_html(self, filepath: Path) -> str: + # returns directory's index as html + + # sanity check + assert filepath.is_dir() + + relative_path_to_dir = filepath.relative_to(self._directory).as_posix() + index_of = "Index of /{}".format(relative_path_to_dir) + h1 = "
<h1>{}</h1>
".format(index_of) + + index_list = [] + dir_index = filepath.iterdir() + for _file in sorted(dir_index): + # show file url as relative to static path + rel_path = _file.relative_to(self._directory).as_posix() + file_url = self._prefix + '/' + rel_path + + # if file is a directory, add '/' to the end of the name + if _file.is_dir(): + file_name = "{}/".format(_file.name) + else: + file_name = _file.name + + index_list.append( + '
<li><a href="{url}">{name}</a></li>'.format(url=file_url,
name=file_name) + ) + ul = "
<ul>\n{}\n</ul>
    ".format('\n'.join(index_list)) + body = "\n{}\n{}\n".format(h1, ul) + + head_str = "\n{}\n".format(index_of) + html = "\n{}\n{}\n".format(head_str, body) + + return html + + def __repr__(self) -> str: + name = "'" + self.name + "'" if self.name is not None else "" + return " {directory!r}>".format( + name=name, path=self._prefix, directory=self._directory) + + +class PrefixedSubAppResource(PrefixResource): + + def __init__(self, prefix: str, app: 'Application') -> None: + super().__init__(prefix) + self._app = app + for resource in app.router.resources(): + resource.add_prefix(prefix) + + def add_prefix(self, prefix: str) -> None: + super().add_prefix(prefix) + for resource in self._app.router.resources(): + resource.add_prefix(prefix) + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not supported " + "by sub-application root") + + def get_info(self) -> Dict[str, Any]: + return {'app': self._app, + 'prefix': self._prefix} + + async def resolve(self, request: Request) -> _Resolve: + if not request.url.raw_path.startswith(self._prefix): + return None, set() + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __len__(self) -> int: + return len(self._app.router.routes()) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._app.router.routes()) + + def __repr__(self) -> str: + return " {app!r}>".format( + prefix=self._prefix, app=self._app) + + +class AbstractRuleMatching(abc.ABC): + @abc.abstractmethod # pragma: no branch + async def match(self, request: Request) -> bool: + """Return bool if the request satisfies the criteria""" + + @abc.abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property + @abc.abstractmethod # pragma: no branch + def canonical(self) -> str: + """Return a str""" + + +class Domain(AbstractRuleMatching): + re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(? None: + super().__init__() + self._domain = self.validation(domain) + + @property + def canonical(self) -> str: + return self._domain + + def validation(self, domain: str) -> str: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + domain = domain.rstrip('.').lower() + if not domain: + raise ValueError("Domain cannot be empty") + elif '://' in domain: + raise ValueError("Scheme not supported") + url = URL('http://' + domain) + if not all( + self.re_part.fullmatch(x) + for x in url.raw_host.split(".")): # type: ignore + raise ValueError("Domain not valid") + if url.port == 80: + return url.raw_host # type: ignore + return '{}:{}'.format(url.raw_host, url.port) + + async def match(self, request: Request) -> bool: + host = request.headers.get(hdrs.HOST) + return host and self.match_domain(host) + + def match_domain(self, host: str) -> bool: + return host.lower() == self._domain + + def get_info(self) -> Dict[str, Any]: + return {'domain': self._domain} + + +class MaskDomain(Domain): + re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(? 
None: + super().__init__(domain) + mask = self._domain.replace('.', r'\.').replace('*', '.*') + self._mask = re.compile(mask) + + @property + def canonical(self) -> str: + return self._mask.pattern + + def match_domain(self, host: str) -> bool: + return self._mask.fullmatch(host) is not None + + +class MatchedSubAppResource(PrefixedSubAppResource): + + def __init__(self, rule: AbstractRuleMatching, app: 'Application') -> None: + AbstractResource.__init__(self) + self._prefix = '' + self._app = app + self._rule = rule + + @property + def canonical(self) -> str: + return self._rule.canonical + + def get_info(self) -> Dict[str, Any]: + return {'app': self._app, + 'rule': self._rule} + + async def resolve(self, request: Request) -> _Resolve: + if not await self._rule.match(request): + return None, set() + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __repr__(self) -> str: + return " {app!r}>" \ + "".format(app=self._app) + + +class ResourceRoute(AbstractRoute): + """A route with resource""" + + def __init__(self, method: str, + handler: Union[_WebHandler, AbstractView], + resource: AbstractResource, *, + expect_handler: Optional[_ExpectHandler]=None) -> None: + super().__init__(method, handler, expect_handler=expect_handler, + resource=resource) + + def __repr__(self) -> str: + return " {handler!r}".format( + method=self.method, resource=self._resource, + handler=self.handler) + + @property + def name(self) -> Optional[str]: + return self._resource.name # type: ignore + + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + return self._resource.url_for(*args, **kwargs) # type: ignore + + def get_info(self) -> Dict[str, Any]: + return self._resource.get_info() # type: ignore + + +class SystemRoute(AbstractRoute): + + def __init__(self, http_exception: HTTPException) -> None: + super().__init__(hdrs.METH_ANY, self._handle) + self._http_exception = http_exception + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not allowed for SystemRoute") + + @property + def name(self) -> Optional[str]: + return None + + def get_info(self) -> Dict[str, Any]: + return {'http_exception': self._http_exception} + + async def _handle(self, request: Request) -> StreamResponse: + raise self._http_exception + + @property + def status(self) -> int: + return self._http_exception.status + + @property + def reason(self) -> str: + return self._http_exception.reason + + def __repr__(self) -> str: + return "".format(self=self) + + +class View(AbstractView): + + async def _iter(self) -> StreamResponse: + if self.request.method not in hdrs.METH_ALL: + self._raise_allowed_methods() + method = getattr(self, self.request.method.lower(), None) + if method is None: + self._raise_allowed_methods() + resp = await method() + return resp + + def __await__(self) -> Generator[Any, None, StreamResponse]: + return self._iter().__await__() + + def _raise_allowed_methods(self) -> None: + allowed_methods = { + m for m in hdrs.METH_ALL if hasattr(self, m.lower())} + raise HTTPMethodNotAllowed(self.request.method, allowed_methods) + + +class ResourcesView(Sized, + Iterable[AbstractResource], + Container[AbstractResource]): + + def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def 
__len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + + def __init__(self, resources: List[AbstractResource]): + self._routes = [] # type: List[AbstractRoute] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r'[.:-]') + + def __init__(self) -> None: + super().__init__() + self._resources = [] # type: List[AbstractResource] + self._named_resources = {} # type: Dict[str, AbstractResource] + + async def resolve(self, request: Request) -> AbstractMatchInfo: + method = request.method + allowed_methods = set() # type: Set[str] + + for resource in self._resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + else: + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(method, + allowed_methods)) + else: + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance(resource, AbstractResource), \ + 'Instance of AbstractResource class is required, got {!r}'.format( + resource) + if self.frozen: + raise RuntimeError( + "Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if not part.isidentifier() or keyword.iskeyword(part): + raise ValueError('Incorrect route name {!r}, ' + 'the name should be a sequence of ' + 'python identifiers separated ' + 'by dash, dot or column'.format(name)) + if name in self._named_resources: + raise ValueError('Duplicate {!r}, ' + 'already handled by {!r}' + .format(name, self._named_resources[name])) + self._named_resources[name] = resource + self._resources.append(resource) + + def add_resource(self, path: str, *, + name: Optional[str]=None) -> Resource: + if path and not path.startswith('/'): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + if not ('{' in path or '}' in path or ROUTE_RE.search(path)): + url = URL.build(path=path) + resource = PlainResource(url.raw_path, name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, 
name=name) + self.register_resource(resource) + return resource + + def add_route(self, method: str, path: str, + handler: Union[_WebHandler, AbstractView], + *, name: Optional[str]=None, + expect_handler: Optional[_ExpectHandler]=None + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, + expect_handler=expect_handler) + + def add_static(self, prefix: str, path: PathLike, *, + name: Optional[str]=None, + expect_handler: Optional[_ExpectHandler]=None, + chunk_size: int=256 * 1024, + show_index: bool=False, follow_symlinks: bool=False, + append_version: bool=False) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith('/') + if prefix.endswith('/'): + prefix = prefix[:-1] + resource = StaticResource(prefix, path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method HEAD + """ + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method OPTIONS + """ + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get(self, path: str, handler: _WebHandler, *, + name: Optional[str]=None, allow_head: bool=True, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method GET, if allow_head is true another + route is added allowing head requests to the same endpoint + """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method POST + """ + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method PUT + """ + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method PATCH + """ + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: _WebHandler, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with method DELETE + """ + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view(self, path: str, handler: AbstractView, + **kwargs: Any) -> AbstractRoute: + """ + Shortcut for add_route with ANY methods for a class-based view + """ + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: + """Append routes to route table. + + Parameter should be a sequence of RouteDef objects. 
+ """ + for route_def in routes: + route_def.register(self) diff --git a/venv/lib/python3.7/site-packages/aiohttp/web_ws.py b/venv/lib/python3.7/site-packages/aiohttp/web_ws.py new file mode 100644 index 0000000..faa05ab --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/web_ws.py @@ -0,0 +1,458 @@ +import asyncio +import base64 +import binascii +import hashlib +import json +from typing import Any, Iterable, Optional, Tuple + +import async_timeout +import attr +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSMessage, + WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .log import ws_logger +from .streams import EofStream, FlowControlDataQueue +from .typedefs import JSONDecoder, JSONEncoder +from .web_exceptions import HTTPBadRequest, HTTPException, HTTPMethodNotAllowed +from .web_request import BaseRequest +from .web_response import StreamResponse + +__all__ = ('WebSocketResponse', 'WebSocketReady', 'WSMsgType',) + +THRESHOLD_CONNLOST_ACCESS = 5 + + +@attr.s(frozen=True, slots=True) +class WebSocketReady: + ok = attr.ib(type=bool) + protocol = attr.ib(type=Optional[str]) + + def __bool__(self) -> bool: + return self.ok + + +class WebSocketResponse(StreamResponse): + + def __init__(self, *, + timeout: float=10.0, receive_timeout: Optional[float]=None, + autoclose: bool=True, autoping: bool=True, + heartbeat: Optional[float]=None, + protocols: Iterable[str]=(), + compress: bool=True, max_msg_size: int=4*1024*1024) -> None: + super().__init__(status=101) + self._protocols = protocols + self._ws_protocol = None # type: Optional[str] + self._writer = None # type: Optional[WebSocketWriter] + self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]] + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code = None # type: Optional[int] + self._loop = None # type: Optional[asyncio.AbstractEventLoop] + self._waiting = None # type: Optional[asyncio.Future[bool]] + self._exception = None # type: Optional[BaseException] + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb = None + self._compress = compress + self._max_msg_size = max_msg_size + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
+ self._loop.create_task(self._writer.ping()) # type: ignore + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop) + + def _pong_not_received(self) -> None: + if self._req is not None and self._req.transport is not None: + self._closed = True + self._close_code = 1006 + self._exception = asyncio.TimeoutError() + self._req.transport.close() + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: + # make pre-check to don't hide it by do_handshake() exceptions + if self._payload_writer is not None: + return self._payload_writer + + protocol, writer = self._pre_start(request) + payload_writer = await super().prepare(request) + assert payload_writer is not None + self._post_start(request, protocol, writer) + await payload_writer.drain() + return payload_writer + + def _handshake(self, request: BaseRequest) -> Tuple['CIMultiDict[str]', + str, + bool, + bool]: + headers = request.headers + if request.method != hdrs.METH_GET: + raise HTTPMethodNotAllowed(request.method, [hdrs.METH_GET]) + if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip(): + raise HTTPBadRequest( + text=('No WebSocket UPGRADE hdr: {}\n Can ' + '"Upgrade" only to "WebSocket".') + .format(headers.get(hdrs.UPGRADE))) + + if 'upgrade' not in headers.get(hdrs.CONNECTION, '').lower(): + raise HTTPBadRequest( + text='No CONNECTION upgrade hdr: {}'.format( + headers.get(hdrs.CONNECTION))) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [str(proto.strip()) for proto in + headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] + + for proto in req_protocols: + if proto in self._protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + 'Client protocols %r don’t overlap server-known ones %r', + req_protocols, self._protocols) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, '') + if version not in ('13', '8', '7'): + raise HTTPBadRequest( + text='Unsupported version: {}'.format(version)) + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise HTTPBadRequest( + text='Handshake error: {!r}'.format(key)) + except binascii.Error: + raise HTTPBadRequest( + text='Handshake error: {!r}'.format(key)) from None + + accept_val = base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest()).decode() + response_headers = CIMultiDict( # type: ignore + {hdrs.UPGRADE: 'websocket', + hdrs.CONNECTION: 'upgrade', + hdrs.TRANSFER_ENCODING: 'chunked', + hdrs.SEC_WEBSOCKET_ACCEPT: accept_val}) + + notakeover = False + compress = 0 + if self._compress: + extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + # Server side always get return with no exception. 
+ # If something happened, just drop compress extension + compress, notakeover = ws_ext_parse(extensions, isserver=True) + if compress: + enabledext = ws_ext_gen(compress=compress, isserver=True, + server_notakeover=notakeover) + response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext + + if protocol: + response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol + return (response_headers, # type: ignore + protocol, + compress, + notakeover) + + def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + self._loop = request._loop + + headers, protocol, compress, notakeover = self._handshake( + request) + + self._reset_heartbeat() + + self.set_status(101) + self.headers.update(headers) + self.force_close() + self._compress = compress + transport = request._protocol.transport + assert transport is not None + writer = WebSocketWriter(request._protocol, + transport, + compress=compress, + notakeover=notakeover) + + return protocol, writer + + def _post_start(self, request: BaseRequest, + protocol: str, writer: WebSocketWriter) -> None: + self._ws_protocol = protocol + self._writer = writer + loop = self._loop + assert loop is not None + self._reader = FlowControlDataQueue( + request._protocol, limit=2 ** 16, loop=loop) + request.protocol.set_parser(WebSocketReader( + self._reader, self._max_msg_size, compress=self._compress)) + # disable HTTP keepalive for WebSocket + request.protocol.keep_alive(False) + + def can_prepare(self, request: BaseRequest) -> WebSocketReady: + if self._writer is not None: + raise RuntimeError('Already started') + try: + _, protocol, _, _ = self._handshake(request) + except HTTPException: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def ws_protocol(self) -> Optional[str]: + return self._ws_protocol + + @property + def compress(self) -> bool: + return self._compress + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes=b'') -> None: + if self._writer is None: + raise RuntimeError('Call .prepare() first') + await self._writer.ping(message) + + async def pong(self, message: bytes=b'') -> None: + # unsolicited pong + if self._writer is None: + raise RuntimeError('Call .prepare() first') + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[bool]=None) -> None: + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if not isinstance(data, str): + raise TypeError('data argument must be str (%r)' % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, + compress: Optional[bool]=None) -> None: + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError('data argument must be byte-ish (%r)' % + type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json(self, data: Any, compress: Optional[bool]=None, *, + dumps: JSONEncoder=json.dumps) -> None: + await self.send_str(dumps(data), compress=compress) + + async def write_eof(self) -> None: # type: ignore + if self._eof_sent: + return + if self._payload_writer is None: + raise RuntimeError("Response has not been started") + + await self.close() + self._eof_sent = True + + async def close(self, *, 
code: int=1000, message: bytes=b'') -> bool: + if self._writer is None: + raise RuntimeError('Call .prepare() first') + + self._cancel_heartbeat() + reader = self._reader + assert reader is not None + + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = 1006 + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + return True + + if self._closing: + return True + + reader = self._reader + assert reader is not None + try: + with async_timeout.timeout(self._timeout, loop=self._loop): + msg = await reader.read() + except asyncio.CancelledError: + self._close_code = 1006 + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + return True + + self._close_code = 1006 + self._exception = asyncio.TimeoutError() + return True + else: + return False + + async def receive(self, timeout: Optional[float]=None) -> WSMessage: + if self._reader is None: + raise RuntimeError('Call .prepare() first') + + loop = self._loop + assert loop is not None + while True: + if self._waiting is not None: + raise RuntimeError( + 'Concurrent call to receive() is not allowed') + + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError('WebSocket connection is closed.') + return WS_CLOSED_MESSAGE + elif self._closing: + return WS_CLOSING_MESSAGE + + try: + self._waiting = loop.create_future() + try: + with async_timeout.timeout( + timeout or self._receive_timeout, loop=self._loop): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + set_result(waiter, True) + self._waiting = None + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = 1006 + raise + except EofStream: + self._close_code = 1000 + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = 1006 + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float]=None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not WSMsgType.TEXT".format( + msg.type, msg.data)) + return msg.data + + async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError( + "Received message {}:{!r} is not bytes".format(msg.type, + msg.data)) + return msg.data 
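
An editorial aside on the WebSocketResponse class being added in this hunk: a minimal, hedged sketch of how a server-side handler typically drives this API. The route path, echo behaviour, and handler name are illustrative assumptions, not part of this diff; the async-for iteration relies on the __aiter__/__anext__ methods defined a little further down in this same file.

    from aiohttp import web

    async def websocket_handler(request: web.Request) -> web.WebSocketResponse:
        ws = web.WebSocketResponse()
        await ws.prepare(request)          # performs the handshake shown above

        async for msg in ws:               # uses __aiter__/__anext__ from this file
            if msg.type == web.WSMsgType.TEXT:
                await ws.send_str("echo: " + msg.data)
            elif msg.type == web.WSMsgType.ERROR:
                break

        return ws

    app = web.Application()
    app.router.add_get("/ws", websocket_handler)
    # web.run_app(app)  # serves on port 8080 by default
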
+ + async def receive_json(self, *, loads: JSONDecoder=json.loads, + timeout: Optional[float]=None) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + async def write(self, data: bytes) -> None: + raise RuntimeError("Cannot call .write() for websocket") + + def __aiter__(self) -> 'WebSocketResponse': + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, + WSMsgType.CLOSING, + WSMsgType.CLOSED): + raise StopAsyncIteration # NOQA + return msg diff --git a/venv/lib/python3.7/site-packages/aiohttp/worker.py b/venv/lib/python3.7/site-packages/aiohttp/worker.py new file mode 100644 index 0000000..73ba6e3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/aiohttp/worker.py @@ -0,0 +1,242 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import sys +from types import FrameType +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat +from gunicorn.workers import base + +from aiohttp import web + +from .helpers import set_result +from .web_app import Application +from .web_log import AccessLogger + +try: + import ssl + SSLContext = ssl.SSLContext # noqa +except ImportError: # pragma: no cover + ssl = None # type: ignore + SSLContext = object # type: ignore + + +__all__ = ('GunicornWebWorker', + 'GunicornUVLoopWebWorker', + 'GunicornTokioWebWorker') + + +class GunicornWebWorker(base.Worker): + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover + super().__init__(*args, **kw) + + self._task = None # type: Optional[asyncio.Task[None]] + self.exit_code = 0 + self._notify_waiter = None # type: Optional[asyncio.Future[bool]] + + def init_process(self) -> None: + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self) -> None: + self._task = self.loop.create_task(self._run()) + + try: # ignore all finalization problems + self.loop.run_until_complete(self._task) + except Exception: + self.log.exception("Exception in gunicorn worker") + if sys.version_info >= (3, 6): + self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() + + sys.exit(self.exit_code) + + async def _run(self) -> None: + if isinstance(self.wsgi, Application): + app = self.wsgi + elif asyncio.iscoroutinefunction(self.wsgi): + app = await self.wsgi() + else: + raise RuntimeError("wsgi app should be either Application or " + "async function returning Application, got {}" + .format(self.wsgi)) + access_log = self.log.access_log if self.cfg.accesslog else None + runner = web.AppRunner(app, + logger=self.log, + keepalive_timeout=self.cfg.keepalive, + access_log=access_log, + access_log_format=self._get_valid_log_format( + self.cfg.access_log_format)) + await runner.setup() + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + runner = runner + assert runner is not None + server = runner.server + assert server is not None + for sock in self.sockets: + site = web.SockSite( + runner, sock, ssl_context=ctx, + shutdown_timeout=self.cfg.graceful_timeout / 100 * 95) + await site.start() + + # If our parent changed then we shut down. 
+ pid = os.getpid() + try: + while self.alive: # type: ignore + self.notify() + + cnt = server.requests_count + if self.cfg.max_requests and cnt > self.cfg.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + await self._wait_next_notify() + except BaseException: + pass + + await runner.cleanup() + + def _wait_next_notify(self) -> 'asyncio.Future[bool]': + self._notify_waiter_done() + + loop = self.loop + assert loop is not None + self._notify_waiter = waiter = loop.create_future() + self.loop.call_later(1.0, self._notify_waiter_done, waiter) + + return waiter + + def _notify_waiter_done(self, waiter: 'asyncio.Future[bool]'=None) -> None: + if waiter is None: + waiter = self._notify_waiter + if waiter is not None: + set_result(waiter, True) + + if waiter is self._notify_waiter: + self._notify_waiter = None + + def init_signals(self) -> None: + # Set up signals through the event loop API. + + self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit, + signal.SIGQUIT, None) + + self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit, + signal.SIGTERM, None) + + self.loop.add_signal_handler(signal.SIGINT, self.handle_quit, + signal.SIGINT, None) + + self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch, + signal.SIGWINCH, None) + + self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1, + signal.SIGUSR1, None) + + self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort, + signal.SIGABRT, None) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + + def handle_quit(self, sig: int, frame: FrameType) -> None: + self.alive = False + + # worker_int callback + self.cfg.worker_int(self) + + # wakeup closing process + self._notify_waiter_done() + + def handle_abort(self, sig: int, frame: FrameType) -> None: + self.alive = False + self.exit_code = 1 + self.cfg.worker_abort(self) + sys.exit(1) + + @staticmethod + def _create_ssl_context(cfg: Any) -> 'SSLContext': + """ Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. + """ + if ssl is None: # pragma: no cover + raise RuntimeError('SSL is not supported.') + + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format: str) -> str: + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r'%\([^\)]+\)', source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://docs.aiohttp.org/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + + def init_process(self) -> None: + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. 
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() + + +class GunicornTokioWebWorker(GunicornWebWorker): + + def init_process(self) -> None: # pragma: no cover + import tokio + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup tokio policy, so that every + # asyncio.get_event_loop() will create an instance + # of tokio event loop. + asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) + + super().init_process() diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/LICENSE b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
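
For the gunicorn worker classes added in worker.py above, a minimal usage sketch follows; the module name, route, and bind address are illustrative assumptions rather than anything taken from this diff. As the worker's _run() shows, it accepts either an Application instance or an async factory returning one.

    # app.py -- hypothetical module served through the gunicorn worker
    from aiohttp import web

    async def index(request: web.Request) -> web.Response:
        return web.Response(text="ok")

    app = web.Application()
    app.router.add_get("/", index)

    # Shell invocation (illustrative):
    #   gunicorn app:app --bind 0.0.0.0:8080 \
    #       --worker-class aiohttp.worker.GunicornWebWorker
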
diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/METADATA b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/METADATA new file mode 100644 index 0000000..5ec05a2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/METADATA @@ -0,0 +1,165 @@ +Metadata-Version: 2.1 +Name: async-timeout +Version: 3.0.1 +Summary: Timeout context manager for asyncio programs +Home-page: https://github.com/aio-libs/async_timeout/ +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache 2 +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Requires-Python: >=3.5.3 + +async-timeout +============= +.. image:: https://travis-ci.org/aio-libs/async-timeout.svg?branch=master + :target: https://travis-ci.org/aio-libs/async-timeout +.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/async-timeout +.. image:: https://img.shields.io/pypi/v/async-timeout.svg + :target: https://pypi.python.org/pypi/async-timeout +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +asyncio-compatible timeout context manager. + + +Usage example +------------- + + +The context manager is useful in cases when you want to apply timeout +logic around block of code or in cases when ``asyncio.wait_for()`` is +not suitable. Also it's much faster than ``asyncio.wait_for()`` +because ``timeout`` doesn't create a new task. + +The ``timeout(timeout, *, loop=None)`` call returns a context manager +that cancels a block on *timeout* expiring:: + + async with timeout(1.5): + await inner() + +1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing + happens. +2. Otherwise ``inner()`` is cancelled internally by sending + ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is + raised outside of context manager scope. + +*timeout* parameter could be ``None`` for skipping timeout functionality. + + +Context manager has ``.expired`` property for check if timeout happens +exactly in context manager:: + + async with timeout(1.5) as cm: + await inner() + print(cm.expired) + +The property is ``True`` if ``inner()`` execution is cancelled by +timeout context manager. + +If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired`` +is ``False``. + +Installation +------------ + +:: + + $ pip install async-timeout + +The library is Python 3 only! + + + +Authors and License +------------------- + +The module is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. 
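
Since the README above documents the timeout() context manager and its .expired flag, here is a minimal self-contained sketch of that usage; the coroutine name and the delay values are illustrative assumptions.

    import asyncio
    from async_timeout import timeout

    async def slow_operation() -> None:
        await asyncio.sleep(10)        # stands in for any awaitable work

    async def main() -> None:
        try:
            async with timeout(1.5) as cm:
                await slow_operation()
        except asyncio.TimeoutError:
            # The block was cancelled by the context manager, so cm.expired
            # is True, exactly as the README describes.
            print("expired:", cm.expired)

    asyncio.run(main())

Passing None instead of 1.5 disables the timeout logic entirely, per the README above.
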
+ + +CHANGES +======= + +3.0.1 (2018-10-09) +------------------ + +- More aggressive typing (#48) + +3.0.0 (2018-05-05) +------------------ + +- Drop Python 3.4, the minimal supported version is Python 3.5.3 + +- Provide type annotations + +2.0.1 (2018-03-13) +------------------ + +* Fix ``PendingDeprecationWarning`` on Python 3.7 (#33) + + +2.0.0 (2017-10-09) +------------------ + +* Changed `timeout <= 0` behaviour + + * Backward incompatibility change, prior this version `0` was + shortcut for `None` + * when timeout <= 0 `TimeoutError` raised faster + +1.4.0 (2017-09-09) +------------------ + +* Implement `remaining` property (#20) + + * If timeout is not started yet or started unconstrained: + `remaining` is `None` + * If timeout is expired: `remaining` is `0.0` + * All others: roughly amount of time before `TimeoutError` is triggered + +1.3.0 (2017-08-23) +------------------ + +* Don't suppress nested exception on timeout. Exception context points + on cancelled line with suspended `await` (#13) + +* Introduce `.timeout` property (#16) + +* Add methods for using as async context manager (#9) + +1.2.1 (2017-05-02) +------------------ + +* Support unpublished event loop's "current_task" api. + + +1.2.0 (2017-03-11) +------------------ + +* Extra check on context manager exit + +* 0 is no-op timeout + + +1.1.0 (2016-10-20) +------------------ + +* Rename to `async-timeout` + +1.0.0 (2016-09-09) +------------------ + +* The first release. + + diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/RECORD b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/RECORD new file mode 100644 index 0000000..8b39514 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/RECORD @@ -0,0 +1,9 @@ +async_timeout-3.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +async_timeout-3.0.1.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357 +async_timeout-3.0.1.dist-info/METADATA,sha256=_3ByJ8L0-cU5wWu75_Rl8n0ZkbSCgW15fMAu_DzwTm0,4013 +async_timeout-3.0.1.dist-info/RECORD,, +async_timeout-3.0.1.dist-info/WHEEL,sha256=-ZFxwj8mZJPIVcZGLrsQ8UGRcxVAOExzPLVBGR7u7bE,92 +async_timeout-3.0.1.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14 +async_timeout/__init__.py,sha256=mGvWOoRqLtScEU3kmzqtTSH7EQsHvu8zhgHxOTXCn7c,3654 +async_timeout/__pycache__/__init__.cpython-37.pyc,, +async_timeout/py.typed,sha256=9LJP7QJ0oxYYrBtmXuFirzMbS3D9_3Tz-d3tyUtNp0U,11 diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/WHEEL b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/WHEEL new file mode 100644 index 0000000..f87af07 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/top_level.txt new file mode 100644 index 0000000..ad29955 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout-3.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +async_timeout diff --git a/venv/lib/python3.7/site-packages/async_timeout/__init__.py b/venv/lib/python3.7/site-packages/async_timeout/__init__.py new file mode 100644 index 0000000..dcc55f0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout/__init__.py @@ -0,0 +1,115 @@ +import asyncio +import sys + +from 
types import TracebackType +from typing import Optional, Type, Any # noqa + + +__version__ = '3.0.1' + +PY_37 = sys.version_info >= (3, 7) + + +class timeout: + """timeout context manager. + + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> with timeout(0.001): + ... async with aiohttp.get('https://github.com') as r: + ... await r.text() + + + timeout - value in seconds or None to disable timeout logic + loop - asyncio compatible event loop + """ + def __init__(self, timeout: Optional[float], + *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._timeout = timeout + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._task = None # type: Optional[asyncio.Task[Any]] + self._cancelled = False + self._cancel_handler = None # type: Optional[asyncio.Handle] + self._cancel_at = None # type: Optional[float] + + def __enter__(self) -> 'timeout': + return self._do_enter() + + def __exit__(self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType) -> Optional[bool]: + self._do_exit(exc_type) + return None + + async def __aenter__(self) -> 'timeout': + return self._do_enter() + + async def __aexit__(self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType) -> None: + self._do_exit(exc_type) + + @property + def expired(self) -> bool: + return self._cancelled + + @property + def remaining(self) -> Optional[float]: + if self._cancel_at is not None: + return max(self._cancel_at - self._loop.time(), 0.0) + else: + return None + + def _do_enter(self) -> 'timeout': + # Support Tornado 5- without timeout + # Details: https://github.com/python/asyncio/issues/392 + if self._timeout is None: + return self + + self._task = current_task(self._loop) + if self._task is None: + raise RuntimeError('Timeout context manager should be used ' + 'inside a task') + + if self._timeout <= 0: + self._loop.call_soon(self._cancel_task) + return self + + self._cancel_at = self._loop.time() + self._timeout + self._cancel_handler = self._loop.call_at( + self._cancel_at, self._cancel_task) + return self + + def _do_exit(self, exc_type: Type[BaseException]) -> None: + if exc_type is asyncio.CancelledError and self._cancelled: + self._cancel_handler = None + self._task = None + raise asyncio.TimeoutError + if self._timeout is not None and self._cancel_handler is not None: + self._cancel_handler.cancel() + self._cancel_handler = None + self._task = None + return None + + def _cancel_task(self) -> None: + if self._task is not None: + self._task.cancel() + self._cancelled = True + + +def current_task(loop: asyncio.AbstractEventLoop) -> 'asyncio.Task[Any]': + if PY_37: + task = asyncio.current_task(loop=loop) # type: ignore + else: + task = asyncio.Task.current_task(loop=loop) + if task is None: + # this should be removed, tokio must use register_task and family API + if hasattr(loop, 'current_task'): + task = loop.current_task() # type: ignore + + return task diff --git a/venv/lib/python3.7/site-packages/async_timeout/py.typed b/venv/lib/python3.7/site-packages/async_timeout/py.typed new file mode 100644 index 0000000..f6e0339 --- /dev/null +++ b/venv/lib/python3.7/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +Placeholder \ No newline at end of file diff --git a/venv/lib/python3.7/site-packages/attr/__init__.py b/venv/lib/python3.7/site-packages/attr/__init__.py new file mode 100644 index 0000000..9ff4d47 --- /dev/null +++ 
b/venv/lib/python3.7/site-packages/attr/__init__.py @@ -0,0 +1,68 @@ +from __future__ import absolute_import, division, print_function + +from functools import partial + +from . import converters, exceptions, filters, validators +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "19.3.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "s", + "set_run_validators", + "validate", + "validators", +] diff --git a/venv/lib/python3.7/site-packages/attr/__init__.pyi b/venv/lib/python3.7/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..38f16f0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/__init__.pyi @@ -0,0 +1,278 @@ +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Optional, + Sequence, + Mapping, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import exceptions as exceptions +from . import filters as filters +from . import converters as converters +from . import validators as validators + +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], _T] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +# FIXME: in reality, if multiple validators are passed they must be in a list or tuple, +# but those are invariant and so would prevent subtypes of _ValidatorType from working +# when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +@overload +def Factory(factory: Callable[[], _T]) -> _T: ... +@overload +def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., +) -> _T: ... 
+ +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: bool + eq: bool + order: bool + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType[_T]] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting TypeVars +# e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> Any: ... 
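
The overloads and notes above describe how attrib()/attr.ib() pick up a field's type from its default, converter, or validator. A small hedged sketch of those forms; the Point class and its fields are invented for illustration only.

    import attr

    @attr.s
    class Point:
        x = attr.ib(default=0)                    # typed via the explicit default
        y = attr.ib(default=0.0, converter=float) # converter form
        tags = attr.ib(factory=list)              # shorthand for attr.Factory(list)
        label = attr.ib(
            default="origin",
            validator=attr.validators.instance_of(str),  # validator form
        )

    p = Point(1, 2, label="p1")
    print(p)  # Point(x=1, y=2.0, tags=[], label='p1') -- converter applied, factory per instance
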
+@overload +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> _C: ... +@overload +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> Callable[[_C], _C]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. waiting on one of these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
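
The _funcs helpers stubbed just above (asdict, astuple, has, assoc, evolve) are easiest to see on a tiny class; everything named here is a hypothetical example, not taken from the diff.

    import attr

    @attr.s(frozen=True)
    class Config:
        host = attr.ib(default="localhost")
        port = attr.ib(default=8080)

    cfg = Config()
    attr.has(Config)                    # True: Config carries __attrs_attrs__
    attr.asdict(cfg)                    # {'host': 'localhost', 'port': 8080}
    attr.astuple(cfg)                   # ('localhost', 8080)
    prod = attr.evolve(cfg, port=80)    # copy with one field replaced
    assert prod.port == 80 and cfg.port == 8080
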
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/lib/python3.7/site-packages/attr/_compat.py b/venv/lib/python3.7/site-packages/attr/_compat.py new file mode 100644 index 0000000..a915db8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_compat.py @@ -0,0 +1,230 @@ +from __future__ import absolute_import, division, print_function + +import platform +import sys +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" + + +if PYPY or sys.version_info[:2] >= (3, 6): + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from UserDict import IterableUserDict + from collections import Mapping, Sequence + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return isinstance(klass, (type, types.ClassType)) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. + return res + + def just_warn(*args, **kw): # pragma: nocover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. 
Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: # pragma: no cover + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. + set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() diff --git a/venv/lib/python3.7/site-packages/attr/_config.py b/venv/lib/python3.7/site-packages/attr/_config.py new file mode 100644 index 0000000..8ec9209 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_config.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. 
By default, they are run. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + """ + return _run_validators diff --git a/venv/lib/python3.7/site-packages/attr/_funcs.py b/venv/lib/python3.7/site-packages/attr/_funcs.py new file mode 100644 index 0000000..c077e42 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_funcs.py @@ -0,0 +1,290 @@ +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attr.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, True, filter, dict_factory, retain_collection_types + ) + elif isinstance(v, (tuple, list, set)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, filter, df, retain_collection_types + ), + _asdict_anything( + vv, filter, df, retain_collection_types + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything(val, filter, dict_factory, retain_collection_types): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. 
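# Editor's note: a minimal usage sketch for the asdict() helper documented above;
# it is not part of the diffed attrs file and the class names are hypothetical.
import attr

@attr.s
class Point(object):
    x = attr.ib()
    y = attr.ib()

@attr.s
class Segment(object):
    start = attr.ib()
    end = attr.ib()

seg = Segment(start=Point(0, 0), end=Point(3, 4))
# recurse=True (the default) descends into nested attrs classes.
assert attr.asdict(seg) == {"start": {"x": 0, "y": 0}, "end": {"x": 3, "y": 4}}
# A filter callable decides per attribute whether it is kept.
assert attr.asdict(seg, recurse=False,
                   filter=lambda a, v: a.name == "start") == {"start": Point(0, 0)}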
+ rv = asdict(val, True, filter, dict_factory, retain_collection_types) + elif isinstance(val, (tuple, list, set)): + cf = val.__class__ if retain_collection_types is True else list + rv = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything(kk, filter, df, retain_collection_types), + _asdict_anything(vv, filter, df, retain_collection_types), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attr.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. + :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. 
deprecated:: 17.1.0 + Use `evolve` instead. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + return cls(**changes) diff --git a/venv/lib/python3.7/site-packages/attr/_make.py b/venv/lib/python3.7/site-packages/attr/_make.py new file mode 100644 index 0000000..46f9c54 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_make.py @@ -0,0 +1,2168 @@ +from __future__ import absolute_import, division, print_function + +import copy +import linecache +import sys +import threading +import uuid +import warnings + +from operator import itemgetter + +from . import _config +from ._compat import ( + PY2, + isclass, + iteritems, + metadata_proxy, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_{}" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! 
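# Editor's note: a small, hypothetical sketch of evolve(), which the deprecation
# note above recommends over assoc(); it is not part of the diffed attrs file.
import attr

@attr.s(frozen=True)
class Config(object):
    host = attr.ib()
    port = attr.ib(default=8080)

base = Config(host="localhost")
# evolve() creates the copy through __init__, so converters and validators run.
prod = attr.evolve(base, host="example.com")
assert (prod.host, prod.port) == ("example.com", 8080)
assert base.host == "localhost"  # the original instance is left untouched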
+ + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to ``attr.NOTHING``), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(callable)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the `Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a ``list`` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: ``callable`` or a ``list`` of ``callable``\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a ``bool`` or a ``callable`` to use a custom function. + :param bool eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. + :param bool order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, + order=True``. Deprecated in favor of *eq* and *order*. + :param hash: Include this attribute in the generated ``__hash__`` + method. If ``None`` (default), mirror *eq*'s value. This is the + correct behavior according the Python spec. Setting this value to + anything else than ``None`` is *discouraged*. + :type hash: ``bool`` or ``None`` + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to converter attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. 
+ + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + """ + eq, order = _determine_eq_order(cmp, eq, order) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + order=order, + ) + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. + + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + eval(compile("\n".join(attr_class_template), "", "exec"), globs) + + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + return str(annot).startswith(_classvar_prefixes) + + +def _get_annotations(cls): + """ + Get annotations for *cls*. 
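# Editor's note: a hypothetical sketch pulling together the attrib()/attr.ib()
# options documented above (default, factory, validator, converter and the
# decorator-style default); it is not part of the diffed attrs file.
import attr

@attr.s
class Server(object):
    host = attr.ib(converter=str)
    port = attr.ib(default=8080, validator=attr.validators.instance_of(int))
    tags = attr.ib(factory=list)      # sugar for default=attr.Factory(list)
    banner = attr.ib()

    @banner.default
    def _banner_default(self):
        # Decorator-set defaults receive the partially initialised instance.
        return "{}:{}".format(self.host, self.port)

s = Server(host=1234)
assert s.host == "1234" and s.port == 8080
assert s.tags == [] and s.banner == "1234:8080"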
+ """ + anns = getattr(cls, "__annotations__", None) + if anns is None: + return {} + + # Verify that the annotations aren't merely inherited. + for base_cls in cls.__mro__[1:]: + if anns is getattr(base_cls, "__annotations__", None): + return {} + + return anns + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _transform_attrs(cls, these, auto_attribs, kw_only): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + taken_attr_names = {a.name: a for a in own_attrs} + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + sub_attrs = getattr(base_cls, "__attrs_attrs__", None) + if sub_attrs is not None: + for a in sub_attrs: + prev_a = taken_attr_names.get(a.name) + # Only add an attribute if it hasn't been defined before. This + # allows for overwriting attribute definitions by subclassing. + if prev_a is None: + base_attrs.append(a) + taken_attr_names[a.name] = a + base_attr_map[a.name] = base_cls + + attr_names = [a.name for a in base_attrs + own_attrs] + + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + if kw_only: + own_attrs = [a._assoc(kw_only=True) for a in own_attrs] + base_attrs = [a._assoc(kw_only=True) for a in base_attrs] + + attrs = AttrsClass(base_attrs + own_attrs) + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. 
Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + return _Attributes((attrs, base_attrs, base_attr_map)) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_cls", + "_cls_dict", + "_attrs", + "_base_names", + "_attr_names", + "_slots", + "_frozen", + "_weakref_slot", + "_cache_hash", + "_has_post_init", + "_delete_attribs", + "_base_attr_map", + "_is_exc", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + auto_attribs, + kw_only, + cache_hash, + is_exc, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, these, auto_attribs, kw_only + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen or _has_frozen_base_class(cls) + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # Attach __setstate__. This is necessary to clear the hash code + # cache on deserialization. See issue + # https://github.com/python-attrs/attrs/issues/482 . + # Note that this code only handles setstate for dict classes. + # For slotted classes, see similar code in _create_slots_class . + if self._cache_hash: + existing_set_state_method = getattr(cls, "__setstate__", None) + if existing_set_state_method: + raise NotImplementedError( + "Currently you cannot use hash caching if " + "you specify your own __setstate__ method." + "See https://github.com/python-attrs/attrs/issues/494 ." 
+ ) + + def cache_hash_set_state(chss_self, _): + # clear hash code cache + setattr(chss_self, _hash_cache_field, None) + + setattr(cls, "__setstate__", cache_hash_set_state) + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + base_names = self._base_names + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + weakref_inherited = False + + # Traverse the MRO to check for an existing __weakref__. + for base_cls in self._cls.__mro__[1:-1]: + if "__weakref__" in getattr(base_cls, "__dict__", ()): + weakref_inherited = True + break + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Settings __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + # Clearing the hash code cache on deserialization is needed + # because hash codes can change from run to run. See issue + # https://github.com/python-attrs/attrs/issues/482 . + # Note that this code only handles setstate for slotted classes. + # For dict classes, see similar code in _patch_original_class . + if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + # slots and frozen require __getstate__/__setstate__ to work + cd["__getstate__"] = slots_getstate + cd["__setstate__"] = slots_setstate + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # https://github.com/python-attrs/attrs/issues/102. On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. 
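# Editor's note: a hypothetical sketch of the frozen/slots behaviour implemented
# above (_frozen_setattrs and the rebuilt slotted class); not part of the file.
import attr

@attr.s(frozen=True, slots=True)
class Token(object):
    value = attr.ib()

t = Token("abc123")
try:
    t.value = "other"    # __setattr__ was replaced by _frozen_setattrs
except attr.exceptions.FrozenInstanceError:
    pass
assert "value" in Token.__slots__   # slots are generated from the attribute names
assert not hasattr(t, "__dict__")   # slotted instances carry no per-instance dict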
+ continue + for cell in closure_cells: + if cell.cell_contents is self._cls: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns=ns) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"], cd["__ne__"] = ( + self._add_method_dunders(meth) + for meth in _make_eq(self._cls, self._attrs) + ) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. Please use `eq` and `order` instead." +) + + +def _determine_eq_order(cmp, eq, order): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3) + + return cmp, cmp + + # If left None, equality is on and ordering mirrors equality. + if eq is None: + eq = True + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=True, + cmp=None, + hash=None, + init=True, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. 
+ + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param bool eq: If ``True`` or ``None`` (default), add ``__eq__`` and + ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes, but only iff the types of both classes are *identical*! + :type eq: `bool` or `None` + :param bool order: If ``True``, add ``__lt__``, ``__le__``, ``__gt__``, + and ``__ge__`` methods that behave like *eq* above and allow instances + to be ordered. If ``None`` (default) mirror value of *eq*. + :type order: `bool` or `None` + :param cmp: Setting to ``True`` is equivalent to setting ``eq=True, + order=True``. Deprecated in favor of *eq* and *order*, has precedence + over them for backward-compatibility though. Must not be mixed with + *eq* or *order*. + :type cmp: `bool` or `None` + :param hash: If ``None`` (default), the ``__hash__`` method is generated + according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :type hash: ``bool`` or ``None`` + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the + argument name. If a ``__attrs_post_init__`` method exists on the + class, it will be called after the class is fully initialized. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + Please note: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. 
You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes + (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `Factory` also + works as expected. + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. 
versionadded:: 19.2.0 *eq* and *order* + """ + eq, order = _determine_eq_order(cmp, eq, order) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_exc = auto_exc is True and issubclass(cls, BaseException) + + builder = _ClassBuilder( + cls, + these, + slots, + frozen, + weakref_slot, + auto_attribs, + kw_only, + cache_hash, + is_exc, + ) + + if repr is True: + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + if eq is True and not is_exc: + builder.add_eq() + if order is True and not is_exc: + builder.add_order() + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or (hash is None and eq is True and frozen is True): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if init is True: + builder.add_init() + else: + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ == _frozen_setattrs + + +def _attrs_to_tuple(obj, attrs): + """ + Create a tuple of all values of *obj*'s *attrs*. + """ + return tuple(getattr(obj, a.name) for a in attrs) + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_id = uuid.uuid4() + extra = "" + count = 1 + + while True: + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + extra, + ) + # To handle concurrency we essentially "reserve" our spot in + # the linecache with a dummy line. The caller can then + # set this value correctly. + cache_line = (1, None, (str(unique_id),), unique_filename) + if ( + linecache.cache.setdefault(unique_filename, cache_line) + == cache_line + ): + return unique_filename + + # Looks like this spot is taken. Try again. 
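# Editor's note: a hypothetical sketch of the attrs()/attr.s() decorator options
# documented above (auto_attribs, ClassVar handling, the eq/frozen hash rules);
# it is not part of the diffed attrs file.
import typing
import attr

@attr.s(auto_attribs=True, frozen=True)
class Measurement(object):
    sensor: str
    value: float = 0.0
    unit: typing.ClassVar[str] = "celsius"   # ClassVar annotations are ignored

m = Measurement("probe-1", 21.5)
# eq=True (the default) together with frozen=True makes attrs generate __hash__.
assert hash(m) == hash(Measurement("probe-1", 21.5))
assert m != Measurement("probe-1", 22.0)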
+ count += 1 + extra = "-{0}".format(count) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + + method_lines = ["def __hash__(self):"] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + method_lines.extend( + [indent + prefix + "hash((", indent + " %d," % (type_hash,)] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " ))") + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + globs = {} + locs = {} + bytecode = compile(script, unique_filename, "exec") + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + + return locs["__hash__"] + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or return the result + negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + +def _make_eq(cls, attrs): + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + globs = {} + locs = {} + bytecode = compile(script, unique_filename, "exec") + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + return locs["__eq__"], __ne__ + + +def _make_order(cls, attrs): + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return _attrs_to_tuple(obj, attrs) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. 
+ """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__, cls.__ne__ = _make_eq(cls, attrs) + + return cls + + +_already_repring = threading.local() + + +def _make_repr(attrs, ns): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the full + name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + working_set = _already_repring.working_set + except AttributeError: + working_set = set() + _already_repring.working_set = working_set + + if id(self) in working_set: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) for the + # duration of this call, it's safe to depend on id(...) stability, and + # not need to track the instance and therefore worry about properties + # like weakref- or hash-ability. + working_set.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + working_set.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns) + return cls + + +def _make_init( + cls, attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc +): + attrs = [a for a in attrs if a.init or a.default is not NOTHING] + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc + ) + locs = {} + bytecode = compile(script, unique_filename, "exec") + attr_dict = dict((a.name, a) for a in attrs) + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if frozen is True: + # Save the lookup overhead in __init__ if we need to circumvent + # immutability. + globs["_cached_setattr"] = _obj_setattr + + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. 
+ linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + + __init__ = locs["__init__"] + __init__.__annotations__ = annotations + + return __init__ + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attr.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attr.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _attrs_to_init_script( + attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + any_slot_ancestors = any( + _is_slot_attr(a.name, base_attr_map) for a in attrs + ) + if frozen is True: + if slots is True: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. 
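# Editor's note: a hypothetical sketch of the introspection helpers defined above
# (fields(), fields_dict() and validate()); it is not part of the diffed file.
import attr

@attr.s
class Account(object):
    name = attr.ib()
    balance = attr.ib(default=0, validator=attr.validators.instance_of(int))

fs = attr.fields(Account)
assert fs.balance.default == 0 and fs[0].name == "name"  # tuple with name accessors
assert list(attr.fields_dict(Account)) == ["name", "balance"]

acct = Account("alice")
acct.balance = "oops"     # plain assignment bypasses the validator...
try:
    attr.validate(acct)   # ...but validate() re-runs every validator
except TypeError:
    pass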
+ # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + def fmt_setter(attr_name, value_var): + return "_setattr('%(attr_name)s', %(value_var)s)" % { + "attr_name": attr_name, + "value_var": value_var, + } + + def fmt_setter_with_converter(attr_name, value_var): + conv_name = _init_converter_pat.format(attr_name) + return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % { + "attr_name": attr_name, + "value_var": value_var, + "conv": conv_name, + } + + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + if any_slot_ancestors: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup + # per assignment. + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + def fmt_setter(attr_name, value_var): + if _is_slot_attr(attr_name, base_attr_map): + res = "_setattr('%(attr_name)s', %(value_var)s)" % { + "attr_name": attr_name, + "value_var": value_var, + } + else: + res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % { + "attr_name": attr_name, + "value_var": value_var, + } + return res + + def fmt_setter_with_converter(attr_name, value_var): + conv_name = _init_converter_pat.format(attr_name) + if _is_slot_attr(attr_name, base_attr_map): + tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))" + else: + tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)" + return tmpl % { + "attr_name": attr_name, + "value_var": value_var, + "c": conv_name, + } + + else: + # Not frozen. + def fmt_setter(attr_name, value): + return "self.%(attr_name)s = %(value)s" % { + "attr_name": attr_name, + "value": value, + } + + def fmt_setter_with_converter(attr_name, value_var): + conv_name = _init_converter_pat.format(attr_name) + return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % { + "attr_name": attr_name, + "value_var": value_var, + "conv": conv_name, + } + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + attr_name = a.name + arg_name = a.name.lstrip("_") + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) + conv_name = _init_converter_pat.format(a.name) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['{attr_name}'].default".format( + attr_name=attr_name + ), + ) + ) + conv_name = _init_converter_pat.format(a.name) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['{attr_name}'].default".format( + attr_name=attr_name + ), + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "{arg_name}=attr_dict['{attr_name}'].default".format( + arg_name=arg_name, attr_name=attr_name + ) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + if a.converter is not None: + lines.append(fmt_setter_with_converter(attr_name, arg_name)) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name)) + elif has_factory: + arg = "{arg_name}=NOTHING".format(arg_name=arg_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append( + "if {arg_name} is not NOTHING:".format(arg_name=arg_name) + ) + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + fmt_setter_with_converter(attr_name, arg_name) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter + else: + lines.append(" " + fmt_setter(attr_name, arg_name)) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + if a.converter is not None: + lines.append(fmt_setter_with_converter(attr_name, arg_name)) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name)) + + if a.init is True and a.converter is None and a.type is not None: + annotations[arg_name] = a.type + + if attrs_to_validate: # we can skip this if there are no validators. 
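# Editor's note: a hypothetical sketch of what the generated __init__ built above
# does with private names, converters and factories; not part of the diffed file.
import attr

@attr.s
class Job(object):
    _id = attr.ib(converter=int)    # the leading underscore is stripped from the arg
    retries = attr.ib(default=attr.Factory(lambda: 3))
    queue = attr.ib(factory=list, init=False)  # init=False: always set from the factory

job = Job(id="7")    # roughly: self._id = int(id); self.retries = 3; self.queue = []
assert job._id == 7 and job.retries == 3 and job.queue == []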
+ names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_{}".format(a.name) + attr_name = "__attr_{}".format(a.name) + lines.append( + " {}(self, {}, self.{})".format(val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + raise PythonTooOldError( + "Keyword-only arguments only work on Python 3 and later." + ) + + args += "{leading_comma}*, {kw_only_args}".format( + leading_comma=", " if args else "", + kw_only_args=", ".join(kw_only_args), + ) + return ( + """\ +def __init__(self, {args}): + {lines} +""".format( + args=args, lines="\n ".join(lines) if lines else "pass" + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + :attribute name: The name of the attribute. + + Plus *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)``. + + For the version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "order", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + order=None, + ): + eq, order = _determine_eq_order(cmp, eq, order) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("order", order) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. 
deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.assoc since fields(Attribute) doesn't work + def _assoc(self, **changes): + """ + Copy *self* and apply *changes*. + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq(_add_repr(Attribute, attrs=_a), attrs=_a), + attrs=[a for a in _a if a.hash], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + order=False, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + order=False, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, # XXX: unused, remove along with cmp + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + order, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + # If validator is a list/tuple, wrap it using helper validator. 
+ if validator and isinstance(validator, (list, tuple)): + self._validator = and_(*validator) + else: + self._validator = validator + self.repr = repr + self.eq = eq + self.order = order + self.hash = hash + self.init = init + self.converter = converter + self.metadata = metadata + self.type = type + self.kw_only = kw_only + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +@attrs(slots=True, init=False, hash=True) +class Factory(object): + """ + Stores a factory callable. + + If passed as the default value to `attr.ib`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + factory = attrib() + takes_self = attrib() + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param name: The name for the new class. + :type name: str + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + post_init = cls_dict.pop("__attrs_post_init__", None) + type_ = type( + name, + bases, + {} if post_init is None else {"__attrs_post_init__": post_init}, + ) + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). 
+ try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + attributes_arguments["eq"], attributes_arguments[ + "order" + ] = _determine_eq_order( + cmp, attributes_arguments.get("eq"), attributes_arguments.get("order") + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param validators: Arbitrary number of validators. + :type validators: callables + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) diff --git a/venv/lib/python3.7/site-packages/attr/_version_info.py b/venv/lib/python3.7/site-packages/attr/_version_info.py new file mode 100644 index 0000000..014e78a --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_version_info.py @@ -0,0 +1,85 @@ +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. 
+ return us < them diff --git a/venv/lib/python3.7/site-packages/attr/_version_info.pyi b/venv/lib/python3.7/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/venv/lib/python3.7/site-packages/attr/converters.py b/venv/lib/python3.7/site-packages/attr/converters.py new file mode 100644 index 0000000..8592897 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/converters.py @@ -0,0 +1,78 @@ +""" +Commonly useful converters. +""" + +from __future__ import absolute_import, division, print_function + +from ._make import NOTHING, Factory + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attr.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes not parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attr.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter diff --git a/venv/lib/python3.7/site-packages/attr/converters.pyi b/venv/lib/python3.7/site-packages/attr/converters.pyi new file mode 100644 index 0000000..63b2a38 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/converters.pyi @@ -0,0 +1,12 @@ +from typing import TypeVar, Optional, Callable, overload +from . import _ConverterType + +_T = TypeVar("_T") + +def optional( + converter: _ConverterType[_T] +) -> _ConverterType[Optional[_T]]: ... +@overload +def default_if_none(default: _T) -> _ConverterType[_T]: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ... 
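(Editor's note, not part of the patch: a minimal sketch of how the converters added in attr/converters.py above are typically wired into an attrs class. The Server class and its fields are hypothetical and only illustrate optional() and default_if_none(); only attr.converters itself comes from the vendored code.)

    # Illustrative usage of attr.converters.optional and default_if_none.
    import attr
    from attr.converters import default_if_none, optional

    @attr.s
    class Server(object):
        # None is left alone, anything else is coerced to int.
        port = attr.ib(converter=optional(int), default=None)
        # None is replaced by the factory result (a fresh empty list).
        tags = attr.ib(converter=default_if_none(factory=list), default=None)

    print(Server(port="8080"))            # Server(port=8080, tags=[])
    print(Server(port=None, tags=None))   # Server(port=None, tags=[])

Note that converters also run on defaults, which is why the None defaults above still come out normalized.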
diff --git a/venv/lib/python3.7/site-packages/attr/exceptions.py b/venv/lib/python3.7/site-packages/attr/exceptions.py new file mode 100644 index 0000000..d1b7618 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/exceptions.py @@ -0,0 +1,74 @@ +from __future__ import absolute_import, division, print_function + + +class FrozenInstanceError(AttributeError): + """ + A frozen/immutable instance has been attempted to be modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 16.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + An ``attrs`` feature requiring a more recent python version has been used. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/venv/lib/python3.7/site-packages/attr/exceptions.pyi b/venv/lib/python3.7/site-packages/attr/exceptions.pyi new file mode 100644 index 0000000..736fde2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/exceptions.pyi @@ -0,0 +1,15 @@ +from typing import Any + +class FrozenInstanceError(AttributeError): + msg: str = ... + +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/venv/lib/python3.7/site-packages/attr/filters.py b/venv/lib/python3.7/site-packages/attr/filters.py new file mode 100644 index 0000000..dc47e8f --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/filters.py @@ -0,0 +1,52 @@ +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Whitelist *what*. + + :param what: What to whitelist. 
+ :type what: `list` of `type` or `attr.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Blacklist *what*. + + :param what: What to blacklist. + :type what: `list` of classes or `attr.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/venv/lib/python3.7/site-packages/attr/filters.pyi b/venv/lib/python3.7/site-packages/attr/filters.pyi new file mode 100644 index 0000000..68368fe --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/filters.pyi @@ -0,0 +1,5 @@ +from typing import Union, Any +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/venv/lib/python3.7/site-packages/attr/py.typed b/venv/lib/python3.7/site-packages/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.7/site-packages/attr/validators.py b/venv/lib/python3.7/site-packages/attr/validators.py new file mode 100644 index 0000000..839d310 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/validators.py @@ -0,0 +1,378 @@ +""" +Commonly useful validators. +""" + +from __future__ import absolute_import, division, print_function + +import re + +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "in_", + "instance_of", + "is_callable", + "matches_re", + "optional", + "provides", +] + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attr.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True) +class _MatchesReValidator(object): + regex = attrib() + flags = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {regex!r}" + " ({value!r} doesn't)".format( + name=attr.name, regex=self.regex.pattern, value=value + ), + attr, + self.regex, + value, + ) + + def __repr__(self): + return "".format( + regex=self.regex + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param str regex: a regex string to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of %s." + % ( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ), + ) + ) + + pattern = re.compile(regex, flags) + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + if fullmatch: + match_func = pattern.fullmatch + else: + pattern = re.compile(r"(?:{})\Z".format(regex), flags) + match_func = pattern.match + + return _MatchesReValidator(pattern, flags, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param zope.interface.Interface interface: The interface to check for. + + :raises TypeError: With a human readable error message, the attribute + (of type `attr.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator(object): + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. 
+ """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator(object): + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attr.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator(object): + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attr.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable(object): + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. 
versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping(object): + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) diff --git a/venv/lib/python3.7/site-packages/attr/validators.pyi b/venv/lib/python3.7/site-packages/attr/validators.pyi new file mode 100644 index 0000000..9a22abb --- /dev/null +++ b/venv/lib/python3.7/site-packages/attr/validators.pyi @@ -0,0 +1,66 @@ +from typing import ( + Container, + List, + Union, + TypeVar, + Type, + Any, + Optional, + Tuple, + Iterable, + Mapping, + Callable, + Match, + AnyStr, + overload, +) +from . import _ValidatorType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: AnyStr, + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... 
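(Editor's note, not part of the patch: a hedged example of composing the validators added in attr/validators.py above — instance_of, deep_iterable, optional and in_. The Poll class and its attributes are invented for illustration only.)

    # Illustrative composition of attr.validators.
    import attr
    from attr.validators import deep_iterable, in_, instance_of, optional

    @attr.s
    class Poll(object):
        question = attr.ib(validator=instance_of(str))
        # Every choice must be a str; the container itself must be a list.
        choices = attr.ib(
            validator=deep_iterable(
                member_validator=instance_of(str),
                iterable_validator=instance_of(list),
            )
        )
        # Optional attribute restricted to a small set of values.
        status = attr.ib(
            default=None, validator=optional(in_(("open", "closed")))
        )

    Poll("Favourite distro?", ["Debian", "Arch"], status="open")  # passes
    try:
        Poll("Favourite distro?", ["Debian", 42])
    except TypeError as exc:
        print(exc)  # deep_iterable rejects the non-string member

Validation only runs while attr._config._run_validators is enabled (the default), which matches the generated __init__ shown earlier in this diff.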
diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE new file mode 100644 index 0000000..7ae3df9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA new file mode 100644 index 0000000..a106415 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA @@ -0,0 +1,229 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 19.3.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Keywords: class,attribute,boilerplate +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/x-rst +Provides-Extra: azure-pipelines +Requires-Dist: coverage ; extra == 
'azure-pipelines' +Requires-Dist: hypothesis ; extra == 'azure-pipelines' +Requires-Dist: pympler ; extra == 'azure-pipelines' +Requires-Dist: pytest (>=4.3.0) ; extra == 'azure-pipelines' +Requires-Dist: six ; extra == 'azure-pipelines' +Requires-Dist: zope.interface ; extra == 'azure-pipelines' +Requires-Dist: pytest-azurepipelines ; extra == 'azure-pipelines' +Provides-Extra: dev +Requires-Dist: coverage ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: six ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: coverage ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: six ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' + +.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png + :alt: attrs Logo + +====================================== +``attrs``: Classes Without Boilerplate +====================================== + +.. image:: https://readthedocs.org/projects/attrs/badge/?version=stable + :target: https://www.attrs.org/en/stable/?badge=stable + :alt: Documentation Status + +.. image:: https://attrs.visualstudio.com/attrs/_apis/build/status/python-attrs.attrs?branchName=master + :target: https://attrs.visualstudio.com/attrs/_build/latest?definitionId=1&branchName=master + :alt: CI Status + +.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg + :target: https://codecov.io/github/python-attrs/attrs + :alt: Test Coverage + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: black + +.. teaser-begin + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder `_ methods). + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. -spiel-end- + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> import attr + + >>> @attr.s + ... class SomeClass(object): + ... a_number = attr.ib(default=42) + ... list_of_numbers = attr.ib(factory=list) + ... + ... def hard_math(self, another_number): + ... 
return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> attr.asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = attr.make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- a complete set of comparison methods (equality and ordering), +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations `_. + +This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving `_ ``namedtuple``\ s. +Which in turn encourages you to write *small classes* that do `one thing well `_. +Never again violate the `single responsibility principle `_ just because implementing ``__init__`` et al is a painful drag. + + +.. -testimonials- + +Testimonials +============ + +**Amber Hawkie Brown**, Twisted Release Manager and Computer Owl: + + Writing a fully-functional class using attrs takes me less time than writing this testimonial. + + +**Glyph Lefkowitz**, creator of `Twisted `_, `Automat `_, and other open source software, in `The One Python Library Everyone Needs `_: + + I’m looking forward to is being able to program in Python-with-attrs everywhere. + It exerts a subtle, but positive, design influence in all the codebases I’ve see it used in. + + +**Kenneth Reitz**, creator of `Requests `_ (`on paper no less `_!): + + attrs—classes for humans. I like it. + + +**Łukasz Langa**, creator of `Black `_, prolific Python core developer, and release manager for Python 3.8 and 3.9: + + I'm increasingly digging your attr.ocity. Good job! + + +.. -end- + +.. -project-information- + +Getting Help +============ + +Please use the ``python-attrs`` tag on `StackOverflow `_ to get help. + +Answering questions of your fellow developers is also great way to help the project! + + +Project Information +=================== + +``attrs`` is released under the `MIT `_ license, +its documentation lives at `Read the Docs `_, +the code on `GitHub `_, +and the latest release on `PyPI `_. +It’s rigorously tested on Python 2.7, 3.4+, and PyPy. + +We collect information on **third-party extensions** in our `wiki `_. +Feel free to browse and add your own! + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +Release Information +=================== + +19.3.0 (2019-10-15) +------------------- + +Changes +^^^^^^^ + +- Fixed ``auto_attribs`` usage when default values cannot be compared directly with ``==``, such as ``numpy`` arrays. + `#585 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. 
+Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? + + diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD new file mode 100644 index 0000000..c9acc4a --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD @@ -0,0 +1,33 @@ +attr/__init__.py,sha256=ONaI-ZEGOIC7IDqH2HANgesnOxPE1m0GIRRYPPsXEHk,1349 +attr/__init__.pyi,sha256=fOnMRTF00b5J23PYPF74u66UVhVzzm0KYVxzmVXHPw0,8257 +attr/__pycache__/__init__.cpython-37.pyc,, +attr/__pycache__/_compat.cpython-37.pyc,, +attr/__pycache__/_config.cpython-37.pyc,, +attr/__pycache__/_funcs.cpython-37.pyc,, +attr/__pycache__/_make.cpython-37.pyc,, +attr/__pycache__/_version_info.cpython-37.pyc,, +attr/__pycache__/converters.cpython-37.pyc,, +attr/__pycache__/exceptions.cpython-37.pyc,, +attr/__pycache__/filters.cpython-37.pyc,, +attr/__pycache__/validators.cpython-37.pyc,, +attr/_compat.py,sha256=-pJtdtqgCg0K6rH_BWf3wKuTum58GD-WWPclQQ2SUaU,7326 +attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514 +attr/_funcs.py,sha256=unAJfNGSTOzxyFzkj7Rs3O1bfsQodmXyir9uZKen-vY,9696 +attr/_make.py,sha256=HhjGhFEbnxPKuUb9hFmAjXoQGpekniw1IEF3_Z-vwCc,70807 +attr/_version_info.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=5QJRYSXE8G7PW0289y_SPwvvZIcw-nJIuBlfYVdB4BQ,2141 +attr/converters.pyi,sha256=wAhCoOT1MFV8t323rpD87O7bxQ8CYLTPiBQd-29BieI,351 +attr/exceptions.py,sha256=hbhOa3b4W8_mRrbj3FsMTR4Bt5xzbJs5xaFTWn8s6h4,1635 +attr/exceptions.pyi,sha256=4zuaJyl2axxWbqnZgxo_2oTpPNbyowEw3A4hqV5PmAc,458 +attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098 +attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/validators.py,sha256=8AsxgdDgh3sGPseiUIMPGcTr6PvaDYfH3AK46tsvs8U,11460 +attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868 +attrs-19.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-19.3.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 +attrs-19.3.0.dist-info/METADATA,sha256=WmnjYy_TftebL3pewXyGEaD4TZRrLUEHk3frEkAtqL0,9022 +attrs-19.3.0.dist-info/RECORD,, +attrs-19.3.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +attrs-19.3.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5 diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..66a062d --- /dev/null +++ b/venv/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +attr diff --git a/venv/lib/python3.7/site-packages/babel/__init__.py b/venv/lib/python3.7/site-packages/babel/__init__.py new file mode 100644 index 0000000..1132e6f --- /dev/null +++ 
b/venv/lib/python3.7/site-packages/babel/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +""" + babel + ~~~~~ + + Integrated collection of utilities that assist in internationalizing and + localizing applications. + + This package is basically composed of two major parts: + + * tools to build and work with ``gettext`` message catalogs + * a Python interface to the CLDR (Common Locale Data Repository), providing + access to various locale display names, localized number and date + formatting, etc. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.core import UnknownLocaleError, Locale, default_locale, \ + negotiate_locale, parse_locale, get_locale_identifier + + +__version__ = '2.7.0' diff --git a/venv/lib/python3.7/site-packages/babel/_compat.py b/venv/lib/python3.7/site-packages/babel/_compat.py new file mode 100644 index 0000000..11b4d7a --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/_compat.py @@ -0,0 +1,79 @@ +import sys +import array + +PY2 = sys.version_info[0] == 2 + +_identity = lambda x: x + + +if not PY2: + text_type = str + string_types = (str,) + integer_types = (int, ) + + text_to_native = lambda s, enc: s + unichr = chr + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from io import StringIO, BytesIO + import pickle + + izip = zip + imap = map + range_type = range + + cmp = lambda a, b: (a > b) - (a < b) + + array_tobytes = array.array.tobytes + from collections import abc + +else: + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + text_to_native = lambda s, enc: s.encode(enc) + unichr = unichr + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from cStringIO import StringIO as BytesIO + from StringIO import StringIO + import cPickle as pickle + + from itertools import imap + from itertools import izip + range_type = xrange + + cmp = cmp + + array_tobytes = array.array.tostring + import collections as abc + +number_types = integer_types + (float,) + + +def force_text(s, encoding='utf-8', errors='strict'): + if isinstance(s, text_type): + return s + if isinstance(s, bytes): + return s.decode(encoding, errors) + return text_type(s) + + +# +# Since Python 3.3, a fast decimal implementation is already included in the +# standard library. Otherwise use cdecimal when available +# +if sys.version_info[:2] >= (3, 3): + import decimal +else: + try: + import cdecimal as decimal + except ImportError: + import decimal diff --git a/venv/lib/python3.7/site-packages/babel/core.py b/venv/lib/python3.7/site-packages/babel/core.py new file mode 100644 index 0000000..a80807a --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/core.py @@ -0,0 +1,1133 @@ +# -*- coding: utf-8 -*- +""" + babel.core + ~~~~~~~~~~ + + Core locale representation and locale data access. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os + +from babel import localedata +from babel._compat import pickle, string_types +from babel.plural import PluralRule + +__all__ = ['UnknownLocaleError', 'Locale', 'default_locale', 'negotiate_locale', + 'parse_locale'] + + +_global_data = None +_default_plural_rule = PluralRule({}) + + +def _raise_no_data_error(): + raise RuntimeError('The babel data files are not available. 
' + 'This usually happens because you are using ' + 'a source checkout from Babel and you did ' + 'not build the data files. Just make sure ' + 'to run "python setup.py import_cldr" before ' + 'installing the library.') + + +def get_global(key): + """Return the dictionary for the given key in the global data. + + The global data is stored in the ``babel/global.dat`` file and contains + information independent of individual locales. + + >>> get_global('zone_aliases')['UTC'] + u'Etc/UTC' + >>> get_global('zone_territories')['Europe/Berlin'] + u'DE' + + The keys available are: + + - ``all_currencies`` + - ``currency_fractions`` + - ``language_aliases`` + - ``likely_subtags`` + - ``parent_exceptions`` + - ``script_aliases`` + - ``territory_aliases`` + - ``territory_currencies`` + - ``territory_languages`` + - ``territory_zones`` + - ``variant_aliases`` + - ``windows_zone_mapping`` + - ``zone_aliases`` + - ``zone_territories`` + + .. note:: The internal structure of the data may change between versions. + + .. versionadded:: 0.9 + + :param key: the data key + """ + global _global_data + if _global_data is None: + dirname = os.path.join(os.path.dirname(__file__)) + filename = os.path.join(dirname, 'global.dat') + if not os.path.isfile(filename): + _raise_no_data_error() + with open(filename, 'rb') as fileobj: + _global_data = pickle.load(fileobj) + return _global_data.get(key, {}) + + +LOCALE_ALIASES = { + 'ar': 'ar_SY', 'bg': 'bg_BG', 'bs': 'bs_BA', 'ca': 'ca_ES', 'cs': 'cs_CZ', + 'da': 'da_DK', 'de': 'de_DE', 'el': 'el_GR', 'en': 'en_US', 'es': 'es_ES', + 'et': 'et_EE', 'fa': 'fa_IR', 'fi': 'fi_FI', 'fr': 'fr_FR', 'gl': 'gl_ES', + 'he': 'he_IL', 'hu': 'hu_HU', 'id': 'id_ID', 'is': 'is_IS', 'it': 'it_IT', + 'ja': 'ja_JP', 'km': 'km_KH', 'ko': 'ko_KR', 'lt': 'lt_LT', 'lv': 'lv_LV', + 'mk': 'mk_MK', 'nl': 'nl_NL', 'nn': 'nn_NO', 'no': 'nb_NO', 'pl': 'pl_PL', + 'pt': 'pt_PT', 'ro': 'ro_RO', 'ru': 'ru_RU', 'sk': 'sk_SK', 'sl': 'sl_SI', + 'sv': 'sv_SE', 'th': 'th_TH', 'tr': 'tr_TR', 'uk': 'uk_UA' +} + + +class UnknownLocaleError(Exception): + """Exception thrown when a locale is requested for which no locale data + is available. + """ + + def __init__(self, identifier): + """Create the exception. + + :param identifier: the identifier string of the unsupported locale + """ + Exception.__init__(self, 'unknown locale %r' % identifier) + + #: The identifier of the locale that could not be found. + self.identifier = identifier + + +class Locale(object): + """Representation of a specific locale. + + >>> locale = Locale('en', 'US') + >>> repr(locale) + "Locale('en', territory='US')" + >>> locale.display_name + u'English (United States)' + + A `Locale` object can also be instantiated from a raw locale string: + + >>> locale = Locale.parse('en-US', sep='-') + >>> repr(locale) + "Locale('en', territory='US')" + + `Locale` objects provide access to a collection of locale data, such as + territory and language names, number and date format patterns, and more: + + >>> locale.number_symbols['decimal'] + u'.' + + If a locale is requested for which no locale data is available, an + `UnknownLocaleError` is raised: + + >>> Locale.parse('en_XX') + Traceback (most recent call last): + ... + UnknownLocaleError: unknown locale 'en_XX' + + For more information see :rfc:`3066`. + """ + + def __init__(self, language, territory=None, script=None, variant=None): + """Initialize the locale object from the given identifier components. 
+ + >>> locale = Locale('en', 'US') + >>> locale.language + 'en' + >>> locale.territory + 'US' + + :param language: the language code + :param territory: the territory (country or region) code + :param script: the script code + :param variant: the variant code + :raise `UnknownLocaleError`: if no locale data is available for the + requested locale + """ + #: the language code + self.language = language + #: the territory (country or region) code + self.territory = territory + #: the script code + self.script = script + #: the variant code + self.variant = variant + self.__data = None + + identifier = str(self) + if not localedata.exists(identifier): + raise UnknownLocaleError(identifier) + + @classmethod + def default(cls, category=None, aliases=LOCALE_ALIASES): + """Return the system default locale for the specified category. + + >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES']: + ... os.environ[name] = '' + >>> os.environ['LANG'] = 'fr_FR.UTF-8' + >>> Locale.default('LC_MESSAGES') + Locale('fr', territory='FR') + + The following fallbacks to the variable are always considered: + + - ``LANGUAGE`` + - ``LC_ALL`` + - ``LC_CTYPE`` + - ``LANG`` + + :param category: one of the ``LC_XXX`` environment variable names + :param aliases: a dictionary of aliases for locale identifiers + """ + # XXX: use likely subtag expansion here instead of the + # aliases dictionary. + locale_string = default_locale(category, aliases=aliases) + return cls.parse(locale_string) + + @classmethod + def negotiate(cls, preferred, available, sep='_', aliases=LOCALE_ALIASES): + """Find the best match between available and requested locale strings. + + >>> Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT']) + Locale('de', territory='DE') + >>> Locale.negotiate(['de_DE', 'en_US'], ['en', 'de']) + Locale('de') + >>> Locale.negotiate(['de_DE', 'de'], ['en_US']) + + You can specify the character used in the locale identifiers to separate + the differnet components. This separator is applied to both lists. Also, + case is ignored in the comparison: + + >>> Locale.negotiate(['de-DE', 'de'], ['en-us', 'de-de'], sep='-') + Locale('de', territory='DE') + + :param preferred: the list of locale identifers preferred by the user + :param available: the list of locale identifiers available + :param aliases: a dictionary of aliases for locale identifiers + """ + identifier = negotiate_locale(preferred, available, sep=sep, + aliases=aliases) + if identifier: + return Locale.parse(identifier, sep=sep) + + @classmethod + def parse(cls, identifier, sep='_', resolve_likely_subtags=True): + """Create a `Locale` instance for the given locale identifier. + + >>> l = Locale.parse('de-DE', sep='-') + >>> l.display_name + u'Deutsch (Deutschland)' + + If the `identifier` parameter is not a string, but actually a `Locale` + object, that object is returned: + + >>> Locale.parse(l) + Locale('de', territory='DE') + + This also can perform resolving of likely subtags which it does + by default. This is for instance useful to figure out the most + likely locale for a territory you can use ``'und'`` as the + language tag: + + >>> Locale.parse('und_AT') + Locale('de', territory='AT') + + :param identifier: the locale identifier string + :param sep: optional component separator + :param resolve_likely_subtags: if this is specified then a locale will + have its likely subtag resolved if the + locale otherwise does not exist. 
For + instance ``zh_TW`` by itself is not a + locale that exists but Babel can + automatically expand it to the full + form of ``zh_hant_TW``. Note that this + expansion is only taking place if no + locale exists otherwise. For instance + there is a locale ``en`` that can exist + by itself. + :raise `ValueError`: if the string does not appear to be a valid locale + identifier + :raise `UnknownLocaleError`: if no locale data is available for the + requested locale + """ + if identifier is None: + return None + elif isinstance(identifier, Locale): + return identifier + elif not isinstance(identifier, string_types): + raise TypeError('Unexpected value for identifier: %r' % (identifier,)) + + parts = parse_locale(identifier, sep=sep) + input_id = get_locale_identifier(parts) + + def _try_load(parts): + try: + return cls(*parts) + except UnknownLocaleError: + return None + + def _try_load_reducing(parts): + # Success on first hit, return it. + locale = _try_load(parts) + if locale is not None: + return locale + + # Now try without script and variant + locale = _try_load(parts[:2]) + if locale is not None: + return locale + + locale = _try_load(parts) + if locale is not None: + return locale + if not resolve_likely_subtags: + raise UnknownLocaleError(input_id) + + # From here onwards is some very bad likely subtag resolving. This + # whole logic is not entirely correct but good enough (tm) for the + # time being. This has been added so that zh_TW does not cause + # errors for people when they upgrade. Later we should properly + # implement ICU like fuzzy locale objects and provide a way to + # maximize and minimize locale tags. + + language, territory, script, variant = parts + language = get_global('language_aliases').get(language, language) + territory = get_global('territory_aliases').get(territory, (territory,))[0] + script = get_global('script_aliases').get(script, script) + variant = get_global('variant_aliases').get(variant, variant) + + if territory == 'ZZ': + territory = None + if script == 'Zzzz': + script = None + + parts = language, territory, script, variant + + # First match: try the whole identifier + new_id = get_locale_identifier(parts) + likely_subtag = get_global('likely_subtags').get(new_id) + if likely_subtag is not None: + locale = _try_load_reducing(parse_locale(likely_subtag)) + if locale is not None: + return locale + + # If we did not find anything so far, try again with a + # simplified identifier that is just the language + likely_subtag = get_global('likely_subtags').get(language) + if likely_subtag is not None: + language2, _, script2, variant2 = parse_locale(likely_subtag) + locale = _try_load_reducing((language2, territory, script2, variant2)) + if locale is not None: + return locale + + raise UnknownLocaleError(input_id) + + def __eq__(self, other): + for key in ('language', 'territory', 'script', 'variant'): + if not hasattr(other, key): + return False + return (self.language == other.language) and \ + (self.territory == other.territory) and \ + (self.script == other.script) and \ + (self.variant == other.variant) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.language, self.territory, self.script, self.variant)) + + def __repr__(self): + parameters = [''] + for key in ('territory', 'script', 'variant'): + value = getattr(self, key) + if value is not None: + parameters.append('%s=%r' % (key, value)) + parameter_string = '%r' % self.language + ', '.join(parameters) + return 'Locale(%s)' % parameter_string 
+ + def __str__(self): + return get_locale_identifier((self.language, self.territory, + self.script, self.variant)) + + @property + def _data(self): + if self.__data is None: + self.__data = localedata.LocaleDataDict(localedata.load(str(self))) + return self.__data + + def get_display_name(self, locale=None): + """Return the display name of the locale using the given locale. + + The display name will include the language, territory, script, and + variant, if those are specified. + + >>> Locale('zh', 'CN', script='Hans').get_display_name('en') + u'Chinese (Simplified, China)' + + :param locale: the locale to use + """ + if locale is None: + locale = self + locale = Locale.parse(locale) + retval = locale.languages.get(self.language) + if retval and (self.territory or self.script or self.variant): + details = [] + if self.script: + details.append(locale.scripts.get(self.script)) + if self.territory: + details.append(locale.territories.get(self.territory)) + if self.variant: + details.append(locale.variants.get(self.variant)) + details = filter(None, details) + if details: + retval += ' (%s)' % u', '.join(details) + return retval + + display_name = property(get_display_name, doc="""\ + The localized display name of the locale. + + >>> Locale('en').display_name + u'English' + >>> Locale('en', 'US').display_name + u'English (United States)' + >>> Locale('sv').display_name + u'svenska' + + :type: `unicode` + """) + + def get_language_name(self, locale=None): + """Return the language of this locale in the given locale. + + >>> Locale('zh', 'CN', script='Hans').get_language_name('de') + u'Chinesisch' + + .. versionadded:: 1.0 + + :param locale: the locale to use + """ + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.languages.get(self.language) + + language_name = property(get_language_name, doc="""\ + The localized language name of the locale. + + >>> Locale('en', 'US').language_name + u'English' + """) + + def get_territory_name(self, locale=None): + """Return the territory name in the given locale.""" + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.territories.get(self.territory) + + territory_name = property(get_territory_name, doc="""\ + The localized territory name of the locale if available. + + >>> Locale('de', 'DE').territory_name + u'Deutschland' + """) + + def get_script_name(self, locale=None): + """Return the script name in the given locale.""" + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.scripts.get(self.script) + + script_name = property(get_script_name, doc="""\ + The localized script name of the locale if available. + + >>> Locale('sr', 'ME', script='Latn').script_name + u'latinica' + """) + + @property + def english_name(self): + """The english display name of the locale. + + >>> Locale('de').english_name + u'German' + >>> Locale('de', 'DE').english_name + u'German (Germany)' + + :type: `unicode`""" + return self.get_display_name(Locale('en')) + + # { General Locale Display Names + + @property + def languages(self): + """Mapping of language codes to translated language names. + + >>> Locale('de', 'DE').languages['ja'] + u'Japanisch' + + See `ISO 639 `_ for + more information. + """ + return self._data['languages'] + + @property + def scripts(self): + """Mapping of script codes to translated script names. + + >>> Locale('en', 'US').scripts['Hira'] + u'Hiragana' + + See `ISO 15924 `_ + for more information. 
+ """ + return self._data['scripts'] + + @property + def territories(self): + """Mapping of script codes to translated script names. + + >>> Locale('es', 'CO').territories['DE'] + u'Alemania' + + See `ISO 3166 `_ + for more information. + """ + return self._data['territories'] + + @property + def variants(self): + """Mapping of script codes to translated script names. + + >>> Locale('de', 'DE').variants['1901'] + u'Alte deutsche Rechtschreibung' + """ + return self._data['variants'] + + # { Number Formatting + + @property + def currencies(self): + """Mapping of currency codes to translated currency names. This + only returns the generic form of the currency name, not the count + specific one. If an actual number is requested use the + :func:`babel.numbers.get_currency_name` function. + + >>> Locale('en').currencies['COP'] + u'Colombian Peso' + >>> Locale('de', 'DE').currencies['COP'] + u'Kolumbianischer Peso' + """ + return self._data['currency_names'] + + @property + def currency_symbols(self): + """Mapping of currency codes to symbols. + + >>> Locale('en', 'US').currency_symbols['USD'] + u'$' + >>> Locale('es', 'CO').currency_symbols['USD'] + u'US$' + """ + return self._data['currency_symbols'] + + @property + def number_symbols(self): + """Symbols used in number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('fr', 'FR').number_symbols['decimal'] + u',' + """ + return self._data['number_symbols'] + + @property + def decimal_formats(self): + """Locale patterns for decimal number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').decimal_formats[None] + + """ + return self._data['decimal_formats'] + + @property + def currency_formats(self): + """Locale patterns for currency number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').currency_formats['standard'] + + >>> Locale('en', 'US').currency_formats['accounting'] + + """ + return self._data['currency_formats'] + + @property + def percent_formats(self): + """Locale patterns for percent number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').percent_formats[None] + + """ + return self._data['percent_formats'] + + @property + def scientific_formats(self): + """Locale patterns for scientific number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').scientific_formats[None] + + """ + return self._data['scientific_formats'] + + # { Calendar Information and Date Formatting + + @property + def periods(self): + """Locale display names for day periods (AM/PM). + + >>> Locale('en', 'US').periods['am'] + u'AM' + """ + try: + return self._data['day_periods']['stand-alone']['wide'] + except KeyError: + return {} + + @property + def day_periods(self): + """Locale display names for various day periods (not necessarily only AM/PM). + + These are not meant to be used without the relevant `day_period_rules`. + """ + return self._data['day_periods'] + + @property + def day_period_rules(self): + """Day period rules for the locale. Used by `get_period_id`. + """ + return self._data.get('day_period_rules', {}) + + @property + def days(self): + """Locale display names for weekdays. 
+ + >>> Locale('de', 'DE').days['format']['wide'][3] + u'Donnerstag' + """ + return self._data['days'] + + @property + def months(self): + """Locale display names for months. + + >>> Locale('de', 'DE').months['format']['wide'][10] + u'Oktober' + """ + return self._data['months'] + + @property + def quarters(self): + """Locale display names for quarters. + + >>> Locale('de', 'DE').quarters['format']['wide'][1] + u'1. Quartal' + """ + return self._data['quarters'] + + @property + def eras(self): + """Locale display names for eras. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').eras['wide'][1] + u'Anno Domini' + >>> Locale('en', 'US').eras['abbreviated'][0] + u'BC' + """ + return self._data['eras'] + + @property + def time_zones(self): + """Locale display names for time zones. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').time_zones['Europe/London']['long']['daylight'] + u'British Summer Time' + >>> Locale('en', 'US').time_zones['America/St_Johns']['city'] + u'St. John\u2019s' + """ + return self._data['time_zones'] + + @property + def meta_zones(self): + """Locale display names for meta time zones. + + Meta time zones are basically groups of different Olson time zones that + have the same GMT offset and daylight savings time. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').meta_zones['Europe_Central']['long']['daylight'] + u'Central European Summer Time' + + .. versionadded:: 0.9 + """ + return self._data['meta_zones'] + + @property + def zone_formats(self): + """Patterns related to the formatting of time zones. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').zone_formats['fallback'] + u'%(1)s (%(0)s)' + >>> Locale('pt', 'BR').zone_formats['region'] + u'Hor\\xe1rio %s' + + .. versionadded:: 0.9 + """ + return self._data['zone_formats'] + + @property + def first_week_day(self): + """The first day of a week, with 0 being Monday. + + >>> Locale('de', 'DE').first_week_day + 0 + >>> Locale('en', 'US').first_week_day + 6 + """ + return self._data['week_data']['first_day'] + + @property + def weekend_start(self): + """The day the weekend starts, with 0 being Monday. + + >>> Locale('de', 'DE').weekend_start + 5 + """ + return self._data['week_data']['weekend_start'] + + @property + def weekend_end(self): + """The day the weekend ends, with 0 being Monday. + + >>> Locale('de', 'DE').weekend_end + 6 + """ + return self._data['week_data']['weekend_end'] + + @property + def min_week_days(self): + """The minimum number of days in a week so that the week is counted as + the first week of a year or month. + + >>> Locale('de', 'DE').min_week_days + 4 + """ + return self._data['week_data']['min_days'] + + @property + def date_formats(self): + """Locale patterns for date formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').date_formats['short'] + + >>> Locale('fr', 'FR').date_formats['long'] + + """ + return self._data['date_formats'] + + @property + def time_formats(self): + """Locale patterns for time formatting. + + .. note:: The format of the value returned may change between + Babel versions. 
+ + >>> Locale('en', 'US').time_formats['short'] + + >>> Locale('fr', 'FR').time_formats['long'] + + """ + return self._data['time_formats'] + + @property + def datetime_formats(self): + """Locale patterns for datetime formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en').datetime_formats['full'] + u"{1} 'at' {0}" + >>> Locale('th').datetime_formats['medium'] + u'{1} {0}' + """ + return self._data['datetime_formats'] + + @property + def datetime_skeletons(self): + """Locale patterns for formatting parts of a datetime. + + >>> Locale('en').datetime_skeletons['MEd'] + + >>> Locale('fr').datetime_skeletons['MEd'] + + >>> Locale('fr').datetime_skeletons['H'] + + """ + return self._data['datetime_skeletons'] + + @property + def interval_formats(self): + """Locale patterns for interval formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + How to format date intervals in Finnish when the day is the + smallest changing component: + + >>> Locale('fi_FI').interval_formats['MEd']['d'] + [u'E d. \u2013 ', u'E d.M.'] + + .. seealso:: + + The primary API to use this data is :py:func:`babel.dates.format_interval`. + + + :rtype: dict[str, dict[str, list[str]]] + """ + return self._data['interval_formats'] + + @property + def plural_form(self): + """Plural rules for the locale. + + >>> Locale('en').plural_form(1) + 'one' + >>> Locale('en').plural_form(0) + 'other' + >>> Locale('fr').plural_form(0) + 'one' + >>> Locale('ru').plural_form(100) + 'many' + """ + return self._data.get('plural_form', _default_plural_rule) + + @property + def list_patterns(self): + """Patterns for generating lists + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en').list_patterns['standard']['start'] + u'{0}, {1}' + >>> Locale('en').list_patterns['standard']['end'] + u'{0}, and {1}' + >>> Locale('en_GB').list_patterns['standard']['end'] + u'{0} and {1}' + """ + return self._data['list_patterns'] + + @property + def ordinal_form(self): + """Plural rules for the locale. + + >>> Locale('en').ordinal_form(1) + 'one' + >>> Locale('en').ordinal_form(2) + 'two' + >>> Locale('en').ordinal_form(3) + 'few' + >>> Locale('fr').ordinal_form(2) + 'other' + >>> Locale('ru').ordinal_form(100) + 'other' + """ + return self._data.get('ordinal_form', _default_plural_rule) + + @property + def measurement_systems(self): + """Localized names for various measurement systems. + + >>> Locale('fr', 'FR').measurement_systems['US'] + u'am\\xe9ricain' + >>> Locale('en', 'US').measurement_systems['US'] + u'US' + + """ + return self._data['measurement_systems'] + + @property + def character_order(self): + """The text direction for the language. + + >>> Locale('de', 'DE').character_order + 'left-to-right' + >>> Locale('ar', 'SA').character_order + 'right-to-left' + """ + return self._data['character_order'] + + @property + def text_direction(self): + """The text direction for the language in CSS short-hand form. + + >>> Locale('de', 'DE').text_direction + 'ltr' + >>> Locale('ar', 'SA').text_direction + 'rtl' + """ + return ''.join(word[0] for word in self.character_order.split('-')) + + @property + def unit_display_names(self): + """Display names for units of measurement. + + .. seealso:: + + You may want to use :py:func:`babel.units.get_unit_name` instead. + + .. note:: The format of the value returned may change between + Babel versions. 
+ + """ + return self._data['unit_display_names'] + + +def default_locale(category=None, aliases=LOCALE_ALIASES): + """Returns the system default locale for a given category, based on + environment variables. + + >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE']: + ... os.environ[name] = '' + >>> os.environ['LANG'] = 'fr_FR.UTF-8' + >>> default_locale('LC_MESSAGES') + 'fr_FR' + + The "C" or "POSIX" pseudo-locales are treated as aliases for the + "en_US_POSIX" locale: + + >>> os.environ['LC_MESSAGES'] = 'POSIX' + >>> default_locale('LC_MESSAGES') + 'en_US_POSIX' + + The following fallbacks to the variable are always considered: + + - ``LANGUAGE`` + - ``LC_ALL`` + - ``LC_CTYPE`` + - ``LANG`` + + :param category: one of the ``LC_XXX`` environment variable names + :param aliases: a dictionary of aliases for locale identifiers + """ + varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG') + for name in filter(None, varnames): + locale = os.getenv(name) + if locale: + if name == 'LANGUAGE' and ':' in locale: + # the LANGUAGE variable may contain a colon-separated list of + # language codes; we just pick the language on the list + locale = locale.split(':')[0] + if locale.split('.')[0] in ('C', 'POSIX'): + locale = 'en_US_POSIX' + elif aliases and locale in aliases: + locale = aliases[locale] + try: + return get_locale_identifier(parse_locale(locale)) + except ValueError: + pass + + +def negotiate_locale(preferred, available, sep='_', aliases=LOCALE_ALIASES): + """Find the best match between available and requested locale strings. + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT']) + 'de_DE' + >>> negotiate_locale(['de_DE', 'en_US'], ['en', 'de']) + 'de' + + Case is ignored by the algorithm, the result uses the case of the preferred + locale identifier: + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) + 'de_DE' + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) + 'de_DE' + + By default, some web browsers unfortunately do not include the territory + in the locale identifier for many locales, and some don't even allow the + user to easily add the territory. So while you may prefer using qualified + locale identifiers in your web-application, they would not normally match + the language-only locale sent by such browsers. To workaround that, this + function uses a default mapping of commonly used langauge-only locale + identifiers to identifiers including the territory: + + >>> negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US']) + 'ja_JP' + + Some browsers even use an incorrect or outdated language code, such as "no" + for Norwegian, where the correct locale identifier would actually be "nb_NO" + (Bokmål) or "nn_NO" (Nynorsk). The aliases are intended to take care of + such cases, too: + + >>> negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']) + 'nb_NO' + + You can override this default mapping by passing a different `aliases` + dictionary to this function, or you can bypass the behavior althogher by + setting the `aliases` parameter to `None`. 
+ + :param preferred: the list of locale strings preferred by the user + :param available: the list of locale strings available + :param sep: character that separates the different parts of the locale + strings + :param aliases: a dictionary of aliases for locale identifiers + """ + available = [a.lower() for a in available if a] + for locale in preferred: + ll = locale.lower() + if ll in available: + return locale + if aliases: + alias = aliases.get(ll) + if alias: + alias = alias.replace('_', sep) + if alias.lower() in available: + return alias + parts = locale.split(sep) + if len(parts) > 1 and parts[0].lower() in available: + return parts[0] + return None + + +def parse_locale(identifier, sep='_'): + """Parse a locale identifier into a tuple of the form ``(language, + territory, script, variant)``. + + >>> parse_locale('zh_CN') + ('zh', 'CN', None, None) + >>> parse_locale('zh_Hans_CN') + ('zh', 'CN', 'Hans', None) + + The default component separator is "_", but a different separator can be + specified using the `sep` parameter: + + >>> parse_locale('zh-CN', sep='-') + ('zh', 'CN', None, None) + + If the identifier cannot be parsed into a locale, a `ValueError` exception + is raised: + + >>> parse_locale('not_a_LOCALE_String') + Traceback (most recent call last): + ... + ValueError: 'not_a_LOCALE_String' is not a valid locale identifier + + Encoding information and locale modifiers are removed from the identifier: + + >>> parse_locale('it_IT@euro') + ('it', 'IT', None, None) + >>> parse_locale('en_US.UTF-8') + ('en', 'US', None, None) + >>> parse_locale('de_DE.iso885915@euro') + ('de', 'DE', None, None) + + See :rfc:`4646` for more information. + + :param identifier: the locale identifier string + :param sep: character that separates the different components of the locale + identifier + :raise `ValueError`: if the string does not appear to be a valid locale + identifier + """ + if '.' in identifier: + # this is probably the charset/encoding, which we don't care about + identifier = identifier.split('.', 1)[0] + if '@' in identifier: + # this is a locale modifier such as @euro, which we don't care about + # either + identifier = identifier.split('@', 1)[0] + + parts = identifier.split(sep) + lang = parts.pop(0).lower() + if not lang.isalpha(): + raise ValueError('expected only letters, got %r' % lang) + + script = territory = variant = None + if parts: + if len(parts[0]) == 4 and parts[0].isalpha(): + script = parts.pop(0).title() + + if parts: + if len(parts[0]) == 2 and parts[0].isalpha(): + territory = parts.pop(0).upper() + elif len(parts[0]) == 3 and parts[0].isdigit(): + territory = parts.pop(0) + + if parts: + if len(parts[0]) == 4 and parts[0][0].isdigit() or \ + len(parts[0]) >= 5 and parts[0][0].isalpha(): + variant = parts.pop() + + if parts: + raise ValueError('%r is not a valid locale identifier' % identifier) + + return lang, territory, script, variant + + +def get_locale_identifier(tup, sep='_'): + """The reverse of :func:`parse_locale`. It creates a locale identifier out + of a ``(language, territory, script, variant)`` tuple. Items can be set to + ``None`` and trailing ``None``\\s can also be left out of the tuple. + + >>> get_locale_identifier(('de', 'DE', None, '1999')) + 'de_DE_1999' + + .. versionadded:: 1.0 + + :param tup: the tuple as returned by :func:`parse_locale`. + :param sep: the separator for the identifier. 
+ """ + tup = tuple(tup[:4]) + lang, territory, script, variant = tup + (None,) * (4 - len(tup)) + return sep.join(filter(None, (lang, script, territory, variant))) diff --git a/venv/lib/python3.7/site-packages/babel/dates.py b/venv/lib/python3.7/site-packages/babel/dates.py new file mode 100644 index 0000000..f1bd66f --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/dates.py @@ -0,0 +1,1788 @@ +# -*- coding: utf-8 -*- +""" + babel.dates + ~~~~~~~~~~~ + + Locale dependent formatting and parsing of dates and times. + + The default locale for the functions in this module is determined by the + following environment variables, in that order: + + * ``LC_TIME``, + * ``LC_ALL``, and + * ``LANG`` + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from __future__ import division + +import re +import warnings +import pytz as _pytz + +from datetime import date, datetime, time, timedelta +from bisect import bisect_right + +from babel.core import default_locale, get_global, Locale +from babel.util import UTC, LOCALTZ +from babel._compat import string_types, integer_types, number_types, PY2 + +# "If a given short metazone form is known NOT to be understood in a given +# locale and the parent locale has this value such that it would normally +# be inherited, the inheritance of this value can be explicitly disabled by +# use of the 'no inheritance marker' as the value, which is 3 simultaneous [sic] +# empty set characters ( U+2205 )." +# - https://www.unicode.org/reports/tr35/tr35-dates.html#Metazone_Names + +NO_INHERITANCE_MARKER = u'\u2205\u2205\u2205' + + +LC_TIME = default_locale('LC_TIME') + +# Aliases for use in scopes where the modules are shadowed by local variables +date_ = date +datetime_ = datetime +time_ = time + + +def _get_dt_and_tzinfo(dt_or_tzinfo): + """ + Parse a `dt_or_tzinfo` value into a datetime and a tzinfo. + + See the docs for this function's callers for semantics. + + :rtype: tuple[datetime, tzinfo] + """ + if dt_or_tzinfo is None: + dt = datetime.now() + tzinfo = LOCALTZ + elif isinstance(dt_or_tzinfo, string_types): + dt = None + tzinfo = get_timezone(dt_or_tzinfo) + elif isinstance(dt_or_tzinfo, integer_types): + dt = None + tzinfo = UTC + elif isinstance(dt_or_tzinfo, (datetime, time)): + dt = _get_datetime(dt_or_tzinfo) + if dt.tzinfo is not None: + tzinfo = dt.tzinfo + else: + tzinfo = UTC + else: + dt = None + tzinfo = dt_or_tzinfo + return dt, tzinfo + + +def _get_datetime(instant): + """ + Get a datetime out of an "instant" (date, time, datetime, number). + + .. warning:: The return values of this function may depend on the system clock. + + If the instant is None, the current moment is used. + If the instant is a time, it's augmented with today's date. + + Dates are converted to naive datetimes with midnight as the time component. + + >>> _get_datetime(date(2015, 1, 1)) + datetime.datetime(2015, 1, 1, 0, 0) + + UNIX timestamps are converted to datetimes. + + >>> _get_datetime(1400000000) + datetime.datetime(2014, 5, 13, 16, 53, 20) + + Other values are passed through as-is. 
+ + >>> x = datetime(2015, 1, 1) + >>> _get_datetime(x) is x + True + + :param instant: date, time, datetime, integer, float or None + :type instant: date|time|datetime|int|float|None + :return: a datetime + :rtype: datetime + """ + if instant is None: + return datetime_.utcnow() + elif isinstance(instant, integer_types) or isinstance(instant, float): + return datetime_.utcfromtimestamp(instant) + elif isinstance(instant, time): + return datetime_.combine(date.today(), instant) + elif isinstance(instant, date) and not isinstance(instant, datetime): + return datetime_.combine(instant, time()) + # TODO (3.x): Add an assertion/type check for this fallthrough branch: + return instant + + +def _ensure_datetime_tzinfo(datetime, tzinfo=None): + """ + Ensure the datetime passed has an attached tzinfo. + + If the datetime is tz-naive to begin with, UTC is attached. + + If a tzinfo is passed in, the datetime is normalized to that timezone. + + >>> _ensure_datetime_tzinfo(datetime(2015, 1, 1)).tzinfo.zone + 'UTC' + + >>> tz = get_timezone("Europe/Stockholm") + >>> _ensure_datetime_tzinfo(datetime(2015, 1, 1, 13, 15, tzinfo=UTC), tzinfo=tz).hour + 14 + + :param datetime: Datetime to augment. + :param tzinfo: Optional tznfo. + :return: datetime with tzinfo + :rtype: datetime + """ + if datetime.tzinfo is None: + datetime = datetime.replace(tzinfo=UTC) + if tzinfo is not None: + datetime = datetime.astimezone(get_timezone(tzinfo)) + if hasattr(tzinfo, 'normalize'): # pytz + datetime = tzinfo.normalize(datetime) + return datetime + + +def _get_time(time, tzinfo=None): + """ + Get a timezoned time from a given instant. + + .. warning:: The return values of this function may depend on the system clock. + + :param time: time, datetime or None + :rtype: time + """ + if time is None: + time = datetime.utcnow() + elif isinstance(time, number_types): + time = datetime.utcfromtimestamp(time) + if time.tzinfo is None: + time = time.replace(tzinfo=UTC) + if isinstance(time, datetime): + if tzinfo is not None: + time = time.astimezone(tzinfo) + if hasattr(tzinfo, 'normalize'): # pytz + time = tzinfo.normalize(time) + time = time.timetz() + elif tzinfo is not None: + time = time.replace(tzinfo=tzinfo) + return time + + +def get_timezone(zone=None): + """Looks up a timezone by name and returns it. The timezone object + returned comes from ``pytz`` and corresponds to the `tzinfo` interface and + can be used with all of the functions of Babel that operate with dates. + + If a timezone is not known a :exc:`LookupError` is raised. If `zone` + is ``None`` a local zone object is returned. + + :param zone: the name of the timezone to look up. If a timezone object + itself is passed in, mit's returned unchanged. + """ + if zone is None: + return LOCALTZ + if not isinstance(zone, string_types): + return zone + try: + return _pytz.timezone(zone) + except _pytz.UnknownTimeZoneError: + raise LookupError('Unknown timezone %s' % zone) + + +def get_next_timezone_transition(zone=None, dt=None): + """Given a timezone it will return a :class:`TimezoneTransition` object + that holds the information about the next timezone transition that's going + to happen. For instance this can be used to detect when the next DST + change is going to happen and how it looks like. + + The transition is calculated relative to the given datetime object. The + next transition that follows the date is used. If a transition cannot + be found the return value will be `None`. 
+ + Transition information can only be provided for timezones returned by + the :func:`get_timezone` function. + + :param zone: the timezone for which the transition should be looked up. + If not provided the local timezone is used. + :param dt: the date after which the next transition should be found. + If not given the current time is assumed. + """ + zone = get_timezone(zone) + dt = _get_datetime(dt).replace(tzinfo=None) + + if not hasattr(zone, '_utc_transition_times'): + raise TypeError('Given timezone does not have UTC transition ' + 'times. This can happen because the operating ' + 'system fallback local timezone is used or a ' + 'custom timezone object') + + try: + idx = max(0, bisect_right(zone._utc_transition_times, dt)) + old_trans = zone._transition_info[idx - 1] + new_trans = zone._transition_info[idx] + old_tz = zone._tzinfos[old_trans] + new_tz = zone._tzinfos[new_trans] + except (LookupError, ValueError): + return None + + return TimezoneTransition( + activates=zone._utc_transition_times[idx], + from_tzinfo=old_tz, + to_tzinfo=new_tz, + reference_date=dt + ) + + +class TimezoneTransition(object): + """A helper object that represents the return value from + :func:`get_next_timezone_transition`. + """ + + def __init__(self, activates, from_tzinfo, to_tzinfo, reference_date=None): + #: the time of the activation of the timezone transition in UTC. + self.activates = activates + #: the timezone from where the transition starts. + self.from_tzinfo = from_tzinfo + #: the timezone for after the transition. + self.to_tzinfo = to_tzinfo + #: the reference date that was provided. This is the `dt` parameter + #: to the :func:`get_next_timezone_transition`. + self.reference_date = reference_date + + @property + def from_tz(self): + """The name of the timezone before the transition.""" + return self.from_tzinfo._tzname + + @property + def to_tz(self): + """The name of the timezone after the transition.""" + return self.to_tzinfo._tzname + + @property + def from_offset(self): + """The UTC offset in seconds before the transition.""" + return int(self.from_tzinfo._utcoffset.total_seconds()) + + @property + def to_offset(self): + """The UTC offset in seconds after the transition.""" + return int(self.to_tzinfo._utcoffset.total_seconds()) + + def __repr__(self): + return ' %s (%s)>' % ( + self.from_tz, + self.to_tz, + self.activates, + ) + + +def get_period_names(width='wide', context='stand-alone', locale=LC_TIME): + """Return the names for day periods (AM/PM) used by the locale. + + >>> get_period_names(locale='en_US')['am'] + u'AM' + + :param width: the width to use, one of "abbreviated", "narrow", or "wide" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).day_periods[context][width] + + +def get_day_names(width='wide', context='format', locale=LC_TIME): + """Return the day names used by the locale for the specified format. + + >>> get_day_names('wide', locale='en_US')[1] + u'Tuesday' + >>> get_day_names('short', locale='en_US')[1] + u'Tu' + >>> get_day_names('abbreviated', locale='es')[1] + u'mar.' 
+ >>> get_day_names('narrow', context='stand-alone', locale='de_DE')[1] + u'D' + + :param width: the width to use, one of "wide", "abbreviated", "short" or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).days[context][width] + + +def get_month_names(width='wide', context='format', locale=LC_TIME): + """Return the month names used by the locale for the specified format. + + >>> get_month_names('wide', locale='en_US')[1] + u'January' + >>> get_month_names('abbreviated', locale='es')[1] + u'ene.' + >>> get_month_names('narrow', context='stand-alone', locale='de_DE')[1] + u'J' + + :param width: the width to use, one of "wide", "abbreviated", or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).months[context][width] + + +def get_quarter_names(width='wide', context='format', locale=LC_TIME): + """Return the quarter names used by the locale for the specified format. + + >>> get_quarter_names('wide', locale='en_US')[1] + u'1st quarter' + >>> get_quarter_names('abbreviated', locale='de_DE')[1] + u'Q1' + >>> get_quarter_names('narrow', locale='de_DE')[1] + u'1' + + :param width: the width to use, one of "wide", "abbreviated", or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).quarters[context][width] + + +def get_era_names(width='wide', locale=LC_TIME): + """Return the era names used by the locale for the specified format. + + >>> get_era_names('wide', locale='en_US')[1] + u'Anno Domini' + >>> get_era_names('abbreviated', locale='de_DE')[1] + u'n. Chr.' + + :param width: the width to use, either "wide", "abbreviated", or "narrow" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).eras[width] + + +def get_date_format(format='medium', locale=LC_TIME): + """Return the date formatting patterns used by the locale for the specified + format. + + >>> get_date_format(locale='en_US') + + >>> get_date_format('full', locale='de_DE') + + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).date_formats[format] + + +def get_datetime_format(format='medium', locale=LC_TIME): + """Return the datetime formatting patterns used by the locale for the + specified format. + + >>> get_datetime_format(locale='en_US') + u'{1}, {0}' + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + patterns = Locale.parse(locale).datetime_formats + if format not in patterns: + format = None + return patterns[format] + + +def get_time_format(format='medium', locale=LC_TIME): + """Return the time formatting patterns used by the locale for the specified + format. 
+ + >>> get_time_format(locale='en_US') + + >>> get_time_format('full', locale='de_DE') + + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).time_formats[format] + + +def get_timezone_gmt(datetime=None, width='long', locale=LC_TIME, return_z=False): + """Return the timezone associated with the given `datetime` object formatted + as string indicating the offset from GMT. + + >>> dt = datetime(2007, 4, 1, 15, 30) + >>> get_timezone_gmt(dt, locale='en') + u'GMT+00:00' + >>> get_timezone_gmt(dt, locale='en', return_z=True) + 'Z' + >>> get_timezone_gmt(dt, locale='en', width='iso8601_short') + u'+00' + >>> tz = get_timezone('America/Los_Angeles') + >>> dt = tz.localize(datetime(2007, 4, 1, 15, 30)) + >>> get_timezone_gmt(dt, locale='en') + u'GMT-07:00' + >>> get_timezone_gmt(dt, 'short', locale='en') + u'-0700' + >>> get_timezone_gmt(dt, locale='en', width='iso8601_short') + u'-07' + + The long format depends on the locale, for example in France the acronym + UTC string is used instead of GMT: + + >>> get_timezone_gmt(dt, 'long', locale='fr_FR') + u'UTC-07:00' + + .. versionadded:: 0.9 + + :param datetime: the ``datetime`` object; if `None`, the current date and + time in UTC is used + :param width: either "long" or "short" or "iso8601" or "iso8601_short" + :param locale: the `Locale` object, or a locale string + :param return_z: True or False; Function returns indicator "Z" + when local time offset is 0 + """ + datetime = _ensure_datetime_tzinfo(_get_datetime(datetime)) + locale = Locale.parse(locale) + + offset = datetime.tzinfo.utcoffset(datetime) + seconds = offset.days * 24 * 60 * 60 + offset.seconds + hours, seconds = divmod(seconds, 3600) + if return_z and hours == 0 and seconds == 0: + return 'Z' + elif seconds == 0 and width == 'iso8601_short': + return u'%+03d' % hours + elif width == 'short' or width == 'iso8601_short': + pattern = u'%+03d%02d' + elif width == 'iso8601': + pattern = u'%+03d:%02d' + else: + pattern = locale.zone_formats['gmt'] % '%+03d:%02d' + return pattern % (hours, seconds // 60) + + +def get_timezone_location(dt_or_tzinfo=None, locale=LC_TIME, return_city=False): + u"""Return a representation of the given timezone using "location format". + + The result depends on both the local display name of the country and the + city associated with the time zone: + + >>> tz = get_timezone('America/St_Johns') + >>> print(get_timezone_location(tz, locale='de_DE')) + Kanada (St. John’s) Zeit + >>> print(get_timezone_location(tz, locale='en')) + Canada (St. John’s) Time + >>> print(get_timezone_location(tz, locale='en', return_city=True)) + St. John’s + >>> tz = get_timezone('America/Mexico_City') + >>> get_timezone_location(tz, locale='de_DE') + u'Mexiko (Mexiko-Stadt) Zeit' + + If the timezone is associated with a country that uses only a single + timezone, just the localized country name is returned: + + >>> tz = get_timezone('Europe/Berlin') + >>> get_timezone_name(tz, locale='de_DE') + u'Mitteleurop\\xe4ische Zeit' + + .. 
versionadded:: 0.9 + + :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines + the timezone; if `None`, the current date and time in + UTC is assumed + :param locale: the `Locale` object, or a locale string + :param return_city: True or False, if True then return exemplar city (location) + for the time zone + :return: the localized timezone name using location format + + """ + dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo) + locale = Locale.parse(locale) + + if hasattr(tzinfo, 'zone'): + zone = tzinfo.zone + else: + zone = tzinfo.tzname(dt or datetime.utcnow()) + + # Get the canonical time-zone code + zone = get_global('zone_aliases').get(zone, zone) + + info = locale.time_zones.get(zone, {}) + + # Otherwise, if there is only one timezone for the country, return the + # localized country name + region_format = locale.zone_formats['region'] + territory = get_global('zone_territories').get(zone) + if territory not in locale.territories: + territory = 'ZZ' # invalid/unknown + territory_name = locale.territories[territory] + if not return_city and territory and len(get_global('territory_zones').get(territory, [])) == 1: + return region_format % territory_name + + # Otherwise, include the city in the output + fallback_format = locale.zone_formats['fallback'] + if 'city' in info: + city_name = info['city'] + else: + metazone = get_global('meta_zones').get(zone) + metazone_info = locale.meta_zones.get(metazone, {}) + if 'city' in metazone_info: + city_name = metazone_info['city'] + elif '/' in zone: + city_name = zone.split('/', 1)[1].replace('_', ' ') + else: + city_name = zone.replace('_', ' ') + + if return_city: + return city_name + return region_format % (fallback_format % { + '0': city_name, + '1': territory_name + }) + + +def get_timezone_name(dt_or_tzinfo=None, width='long', uncommon=False, + locale=LC_TIME, zone_variant=None, return_zone=False): + r"""Return the localized display name for the given timezone. The timezone + may be specified using a ``datetime`` or `tzinfo` object. + + >>> dt = time(15, 30, tzinfo=get_timezone('America/Los_Angeles')) + >>> get_timezone_name(dt, locale='en_US') + u'Pacific Standard Time' + >>> get_timezone_name(dt, locale='en_US', return_zone=True) + 'America/Los_Angeles' + >>> get_timezone_name(dt, width='short', locale='en_US') + u'PST' + + If this function gets passed only a `tzinfo` object and no concrete + `datetime`, the returned display name is indenpendent of daylight savings + time. This can be used for example for selecting timezones, or to set the + time of events that recur across DST changes: + + >>> tz = get_timezone('America/Los_Angeles') + >>> get_timezone_name(tz, locale='en_US') + u'Pacific Time' + >>> get_timezone_name(tz, 'short', locale='en_US') + u'PT' + + If no localized display name for the timezone is available, and the timezone + is associated with a country that uses only a single timezone, the name of + that country is returned, formatted according to the locale: + + >>> tz = get_timezone('Europe/Berlin') + >>> get_timezone_name(tz, locale='de_DE') + u'Mitteleurop\xe4ische Zeit' + >>> get_timezone_name(tz, locale='pt_BR') + u'Hor\xe1rio da Europa Central' + + On the other hand, if the country uses multiple timezones, the city is also + included in the representation: + + >>> tz = get_timezone('America/St_Johns') + >>> get_timezone_name(tz, locale='de_DE') + u'Neufundland-Zeit' + + Note that short format is currently not supported for all timezones and + all locales. 
This is partially because not every timezone has a short + code in every locale. In that case it currently falls back to the long + format. + + For more information see `LDML Appendix J: Time Zone Display Names + `_ + + .. versionadded:: 0.9 + + .. versionchanged:: 1.0 + Added `zone_variant` support. + + :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines + the timezone; if a ``tzinfo`` object is used, the + resulting display name will be generic, i.e. + independent of daylight savings time; if `None`, the + current date in UTC is assumed + :param width: either "long" or "short" + :param uncommon: deprecated and ignored + :param zone_variant: defines the zone variation to return. By default the + variation is defined from the datetime object + passed in. If no datetime object is passed in, the + ``'generic'`` variation is assumed. The following + values are valid: ``'generic'``, ``'daylight'`` and + ``'standard'``. + :param locale: the `Locale` object, or a locale string + :param return_zone: True or False. If true then function + returns long time zone ID + """ + dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo) + locale = Locale.parse(locale) + + if hasattr(tzinfo, 'zone'): + zone = tzinfo.zone + else: + zone = tzinfo.tzname(dt) + + if zone_variant is None: + if dt is None: + zone_variant = 'generic' + else: + dst = tzinfo.dst(dt) + if dst: + zone_variant = 'daylight' + else: + zone_variant = 'standard' + else: + if zone_variant not in ('generic', 'standard', 'daylight'): + raise ValueError('Invalid zone variation') + + # Get the canonical time-zone code + zone = get_global('zone_aliases').get(zone, zone) + if return_zone: + return zone + info = locale.time_zones.get(zone, {}) + # Try explicitly translated zone names first + if width in info: + if zone_variant in info[width]: + return info[width][zone_variant] + + metazone = get_global('meta_zones').get(zone) + if metazone: + metazone_info = locale.meta_zones.get(metazone, {}) + if width in metazone_info: + name = metazone_info[width].get(zone_variant) + if width == 'short' and name == NO_INHERITANCE_MARKER: + # If the short form is marked no-inheritance, + # try to fall back to the long name instead. + name = metazone_info.get('long', {}).get(zone_variant) + if name: + return name + + # If we have a concrete datetime, we assume that the result can't be + # independent of daylight savings time, so we return the GMT offset + if dt is not None: + return get_timezone_gmt(dt, width=width, locale=locale) + + return get_timezone_location(dt_or_tzinfo, locale=locale) + + +def format_date(date=None, format='medium', locale=LC_TIME): + """Return a date formatted according to the given pattern. + + >>> d = date(2007, 4, 1) + >>> format_date(d, locale='en_US') + u'Apr 1, 2007' + >>> format_date(d, format='full', locale='de_DE') + u'Sonntag, 1. 
April 2007' + + If you don't want to use the locale default formats, you can specify a + custom date pattern: + + >>> format_date(d, "EEE, MMM d, ''yy", locale='en') + u"Sun, Apr 1, '07" + + :param date: the ``date`` or ``datetime`` object; if `None`, the current + date is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param locale: a `Locale` object or a locale identifier + """ + if date is None: + date = date_.today() + elif isinstance(date, datetime): + date = date.date() + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + format = get_date_format(format, locale=locale) + pattern = parse_pattern(format) + return pattern.apply(date, locale) + + +def format_datetime(datetime=None, format='medium', tzinfo=None, + locale=LC_TIME): + r"""Return a date formatted according to the given pattern. + + >>> dt = datetime(2007, 4, 1, 15, 30) + >>> format_datetime(dt, locale='en_US') + u'Apr 1, 2007, 3:30:00 PM' + + For any pattern requiring the display of the time-zone, the third-party + ``pytz`` package is needed to explicitly specify the time-zone: + + >>> format_datetime(dt, 'full', tzinfo=get_timezone('Europe/Paris'), + ... locale='fr_FR') + u'dimanche 1 avril 2007 \xe0 17:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale' + >>> format_datetime(dt, "yyyy.MM.dd G 'at' HH:mm:ss zzz", + ... tzinfo=get_timezone('US/Eastern'), locale='en') + u'2007.04.01 AD at 11:30:00 EDT' + + :param datetime: the `datetime` object; if `None`, the current date and + time is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param tzinfo: the timezone to apply to the time for display + :param locale: a `Locale` object or a locale identifier + """ + datetime = _ensure_datetime_tzinfo(_get_datetime(datetime), tzinfo) + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + return get_datetime_format(format, locale=locale) \ + .replace("'", "") \ + .replace('{0}', format_time(datetime, format, tzinfo=None, + locale=locale)) \ + .replace('{1}', format_date(datetime, format, locale=locale)) + else: + return parse_pattern(format).apply(datetime, locale) + + +def format_time(time=None, format='medium', tzinfo=None, locale=LC_TIME): + r"""Return a time formatted according to the given pattern. + + >>> t = time(15, 30) + >>> format_time(t, locale='en_US') + u'3:30:00 PM' + >>> format_time(t, format='short', locale='de_DE') + u'15:30' + + If you don't want to use the locale default formats, you can specify a + custom time pattern: + + >>> format_time(t, "hh 'o''clock' a", locale='en') + u"03 o'clock PM" + + For any pattern requiring the display of the time-zone a + timezone has to be specified explicitly: + + >>> t = datetime(2007, 4, 1, 15, 30) + >>> tzinfo = get_timezone('Europe/Paris') + >>> t = tzinfo.localize(t) + >>> format_time(t, format='full', tzinfo=tzinfo, locale='fr_FR') + u'15:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale' + >>> format_time(t, "hh 'o''clock' a, zzzz", tzinfo=get_timezone('US/Eastern'), + ... locale='en') + u"09 o'clock AM, Eastern Daylight Time" + + As that example shows, when this function gets passed a + ``datetime.datetime`` value, the actual time in the formatted string is + adjusted to the timezone specified by the `tzinfo` parameter. If the + ``datetime`` is "naive" (i.e. it has no associated timezone information), + it is assumed to be in UTC. 
+ + These timezone calculations are **not** performed if the value is of type + ``datetime.time``, as without date information there's no way to determine + what a given time would translate to in a different timezone without + information about whether daylight savings time is in effect or not. This + means that time values are left as-is, and the value of the `tzinfo` + parameter is only used to display the timezone name if needed: + + >>> t = time(15, 30) + >>> format_time(t, format='full', tzinfo=get_timezone('Europe/Paris'), + ... locale='fr_FR') + u'15:30:00 heure normale d\u2019Europe centrale' + >>> format_time(t, format='full', tzinfo=get_timezone('US/Eastern'), + ... locale='en_US') + u'3:30:00 PM Eastern Standard Time' + + :param time: the ``time`` or ``datetime`` object; if `None`, the current + time in UTC is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param tzinfo: the time-zone to apply to the time for display + :param locale: a `Locale` object or a locale identifier + """ + time = _get_time(time, tzinfo) + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + format = get_time_format(format, locale=locale) + return parse_pattern(format).apply(time, locale) + + +def format_skeleton(skeleton, datetime=None, tzinfo=None, fuzzy=True, locale=LC_TIME): + r"""Return a time and/or date formatted according to the given pattern. + + The skeletons are defined in the CLDR data and provide more flexibility + than the simple short/long/medium formats, but are a bit harder to use. + The are defined using the date/time symbols without order or punctuation + and map to a suitable format for the given locale. + + >>> t = datetime(2007, 4, 1, 15, 30) + >>> format_skeleton('MMMEd', t, locale='fr') + u'dim. 1 avr.' + >>> format_skeleton('MMMEd', t, locale='en') + u'Sun, Apr 1' + >>> format_skeleton('yMMd', t, locale='fi') # yMMd is not in the Finnish locale; yMd gets used + u'1.4.2007' + >>> format_skeleton('yMMd', t, fuzzy=False, locale='fi') # yMMd is not in the Finnish locale, an error is thrown + Traceback (most recent call last): + ... + KeyError: yMMd + + After the skeleton is resolved to a pattern `format_datetime` is called so + all timezone processing etc is the same as for that. + + :param skeleton: A date time skeleton as defined in the cldr data. + :param datetime: the ``time`` or ``datetime`` object; if `None`, the current + time in UTC is used + :param tzinfo: the time-zone to apply to the time for display + :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's + close enough to it. + :param locale: a `Locale` object or a locale identifier + """ + locale = Locale.parse(locale) + if fuzzy and skeleton not in locale.datetime_skeletons: + skeleton = match_skeleton(skeleton, locale.datetime_skeletons) + format = locale.datetime_skeletons[skeleton] + return format_datetime(datetime, format, tzinfo, locale) + + +TIMEDELTA_UNITS = ( + ('year', 3600 * 24 * 365), + ('month', 3600 * 24 * 30), + ('week', 3600 * 24 * 7), + ('day', 3600 * 24), + ('hour', 3600), + ('minute', 60), + ('second', 1) +) + + +def format_timedelta(delta, granularity='second', threshold=.85, + add_direction=False, format='long', + locale=LC_TIME): + """Return a time delta according to the rules of the given locale. 
+ + >>> format_timedelta(timedelta(weeks=12), locale='en_US') + u'3 months' + >>> format_timedelta(timedelta(seconds=1), locale='es') + u'1 segundo' + + The granularity parameter can be provided to alter the lowest unit + presented, which defaults to a second. + + >>> format_timedelta(timedelta(hours=3), granularity='day', + ... locale='en_US') + u'1 day' + + The threshold parameter can be used to determine at which value the + presentation switches to the next higher unit. A higher threshold factor + means the presentation will switch later. For example: + + >>> format_timedelta(timedelta(hours=23), threshold=0.9, locale='en_US') + u'1 day' + >>> format_timedelta(timedelta(hours=23), threshold=1.1, locale='en_US') + u'23 hours' + + In addition directional information can be provided that informs + the user if the date is in the past or in the future: + + >>> format_timedelta(timedelta(hours=1), add_direction=True, locale='en') + u'in 1 hour' + >>> format_timedelta(timedelta(hours=-1), add_direction=True, locale='en') + u'1 hour ago' + + The format parameter controls how compact or wide the presentation is: + + >>> format_timedelta(timedelta(hours=3), format='short', locale='en') + u'3 hr' + >>> format_timedelta(timedelta(hours=3), format='narrow', locale='en') + u'3h' + + :param delta: a ``timedelta`` object representing the time difference to + format, or the delta in seconds as an `int` value + :param granularity: determines the smallest unit that should be displayed, + the value can be one of "year", "month", "week", "day", + "hour", "minute" or "second" + :param threshold: factor that determines at which point the presentation + switches to the next higher unit + :param add_direction: if this flag is set to `True` the return value will + include directional information. For instance a + positive timedelta will include the information about + it being in the future, a negative will be information + about the value being in the past. + :param format: the format, can be "narrow", "short" or "long". ( + "medium" is deprecated, currently converted to "long" to + maintain compatibility) + :param locale: a `Locale` object or a locale identifier + """ + if format not in ('narrow', 'short', 'medium', 'long'): + raise TypeError('Format must be one of "narrow", "short" or "long"') + if format == 'medium': + warnings.warn('"medium" value for format param of format_timedelta' + ' is deprecated. 
Use "long" instead', + category=DeprecationWarning) + format = 'long' + if isinstance(delta, timedelta): + seconds = int((delta.days * 86400) + delta.seconds) + else: + seconds = delta + locale = Locale.parse(locale) + + def _iter_patterns(a_unit): + if add_direction: + unit_rel_patterns = locale._data['date_fields'][a_unit] + if seconds >= 0: + yield unit_rel_patterns['future'] + else: + yield unit_rel_patterns['past'] + a_unit = 'duration-' + a_unit + yield locale._data['unit_patterns'].get(a_unit, {}).get(format) + + for unit, secs_per_unit in TIMEDELTA_UNITS: + value = abs(seconds) / secs_per_unit + if value >= threshold or unit == granularity: + if unit == granularity and value > 0: + value = max(1, value) + value = int(round(value)) + plural_form = locale.plural_form(value) + pattern = None + for patterns in _iter_patterns(unit): + if patterns is not None: + pattern = patterns[plural_form] + break + # This really should not happen + if pattern is None: + return u'' + return pattern.replace('{0}', str(value)) + + return u'' + + +def _format_fallback_interval(start, end, skeleton, tzinfo, locale): + if skeleton in locale.datetime_skeletons: # Use the given skeleton + format = lambda dt: format_skeleton(skeleton, dt, tzinfo, locale=locale) + elif all((isinstance(d, date) and not isinstance(d, datetime)) for d in (start, end)): # Both are just dates + format = lambda dt: format_date(dt, locale=locale) + elif all((isinstance(d, time) and not isinstance(d, date)) for d in (start, end)): # Both are times + format = lambda dt: format_time(dt, tzinfo=tzinfo, locale=locale) + else: + format = lambda dt: format_datetime(dt, tzinfo=tzinfo, locale=locale) + + formatted_start = format(start) + formatted_end = format(end) + + if formatted_start == formatted_end: + return format(start) + + return ( + locale.interval_formats.get(None, "{0}-{1}"). + replace("{0}", formatted_start). + replace("{1}", formatted_end) + ) + + +def format_interval(start, end, skeleton=None, tzinfo=None, fuzzy=True, locale=LC_TIME): + """ + Format an interval between two instants according to the locale's rules. + + >>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "yMd", locale="fi") + u'15.\u201317.1.2016' + + >>> format_interval(time(12, 12), time(16, 16), "Hm", locale="en_GB") + '12:12\u201316:16' + + >>> format_interval(time(5, 12), time(16, 16), "hm", locale="en_US") + '5:12 AM \u2013 4:16 PM' + + >>> format_interval(time(16, 18), time(16, 24), "Hm", locale="it") + '16:18\u201316:24' + + If the start instant equals the end instant, the interval is formatted like the instant. + + >>> format_interval(time(16, 18), time(16, 18), "Hm", locale="it") + '16:18' + + Unknown skeletons fall back to "default" formatting. + + >>> format_interval(date(2015, 1, 1), date(2017, 1, 1), "wzq", locale="ja") + '2015/01/01\uff5e2017/01/01' + + >>> format_interval(time(16, 18), time(16, 24), "xxx", locale="ja") + '16:18:00\uff5e16:24:00' + + >>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "xxx", locale="de") + '15.01.2016 \u2013 17.01.2016' + + :param start: First instant (datetime/date/time) + :param end: Second instant (datetime/date/time) + :param skeleton: The "skeleton format" to use for formatting. + :param tzinfo: tzinfo to use (if none is already attached) + :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's + close enough to it. + :param locale: A locale object or identifier. 
+ :return: Formatted interval + """ + locale = Locale.parse(locale) + + # NB: The quote comments below are from the algorithm description in + # https://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats + + # > Look for the intervalFormatItem element that matches the "skeleton", + # > starting in the current locale and then following the locale fallback + # > chain up to, but not including root. + + interval_formats = locale.interval_formats + + if skeleton not in interval_formats or not skeleton: + # > If no match was found from the previous step, check what the closest + # > match is in the fallback locale chain, as in availableFormats. That + # > is, this allows for adjusting the string value field's width, + # > including adjusting between "MMM" and "MMMM", and using different + # > variants of the same field, such as 'v' and 'z'. + if skeleton and fuzzy: + skeleton = match_skeleton(skeleton, interval_formats) + else: + skeleton = None + if not skeleton: # Still no match whatsoever? + # > Otherwise, format the start and end datetime using the fallback pattern. + return _format_fallback_interval(start, end, skeleton, tzinfo, locale) + + skel_formats = interval_formats[skeleton] + + if start == end: + return format_skeleton(skeleton, start, tzinfo, fuzzy=fuzzy, locale=locale) + + start = _ensure_datetime_tzinfo(_get_datetime(start), tzinfo=tzinfo) + end = _ensure_datetime_tzinfo(_get_datetime(end), tzinfo=tzinfo) + + start_fmt = DateTimeFormat(start, locale=locale) + end_fmt = DateTimeFormat(end, locale=locale) + + # > If a match is found from previous steps, compute the calendar field + # > with the greatest difference between start and end datetime. If there + # > is no difference among any of the fields in the pattern, format as a + # > single date using availableFormats, and return. + + for field in PATTERN_CHAR_ORDER: # These are in largest-to-smallest order + if field in skel_formats: + if start_fmt.extract(field) != end_fmt.extract(field): + # > If there is a match, use the pieces of the corresponding pattern to + # > format the start and end datetime, as above. + return "".join( + parse_pattern(pattern).apply(instant, locale) + for pattern, instant + in zip(skel_formats[field], (start, end)) + ) + + # > Otherwise, format the start and end datetime using the fallback pattern. + + return _format_fallback_interval(start, end, skeleton, tzinfo, locale) + + +def get_period_id(time, tzinfo=None, type=None, locale=LC_TIME): + """ + Get the day period ID for a given time. + + This ID can be used as a key for the period name dictionary. + + >>> get_period_names(locale="de")[get_period_id(time(7, 42), locale="de")] + u'Morgen' + + :param time: The time to inspect. + :param tzinfo: The timezone for the time. See ``format_time``. + :param type: The period type to use. Either "selection" or None. + The selection type is used for selecting among phrases such as + “Your email arrived yesterday evening” or “Your email arrived last night”. + :param locale: the `Locale` object, or a locale string + :return: period ID. Something is always returned -- even if it's just "am" or "pm". + """ + time = _get_time(time, tzinfo) + seconds_past_midnight = int(time.hour * 60 * 60 + time.minute * 60 + time.second) + locale = Locale.parse(locale) + + # The LDML rules state that the rules may not overlap, so iterating in arbitrary + # order should be alright, though `at` periods should be preferred. 
+ rulesets = locale.day_period_rules.get(type, {}).items() + + for rule_id, rules in rulesets: + for rule in rules: + if "at" in rule and rule["at"] == seconds_past_midnight: + return rule_id + + for rule_id, rules in rulesets: + for rule in rules: + start_ok = end_ok = False + + if "from" in rule and seconds_past_midnight >= rule["from"]: + start_ok = True + if "to" in rule and seconds_past_midnight <= rule["to"]: + # This rule type does not exist in the present CLDR data; + # excuse the lack of test coverage. + end_ok = True + if "before" in rule and seconds_past_midnight < rule["before"]: + end_ok = True + if "after" in rule: + raise NotImplementedError("'after' is deprecated as of CLDR 29.") + + if start_ok and end_ok: + return rule_id + + if seconds_past_midnight < 43200: + return "am" + else: + return "pm" + + +def parse_date(string, locale=LC_TIME): + """Parse a date from a string. + + This function uses the date format for the locale as a hint to determine + the order in which the date fields appear in the string. + + >>> parse_date('4/1/04', locale='en_US') + datetime.date(2004, 4, 1) + >>> parse_date('01.04.2004', locale='de_DE') + datetime.date(2004, 4, 1) + + :param string: the string containing the date + :param locale: a `Locale` object or a locale identifier + """ + # TODO: try ISO format first? + format = get_date_format(locale=locale).pattern.lower() + year_idx = format.index('y') + month_idx = format.index('m') + if month_idx < 0: + month_idx = format.index('l') + day_idx = format.index('d') + + indexes = [(year_idx, 'Y'), (month_idx, 'M'), (day_idx, 'D')] + indexes.sort() + indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)]) + + # FIXME: this currently only supports numbers, but should also support month + # names, both in the requested locale, and english + + numbers = re.findall(r'(\d+)', string) + year = numbers[indexes['Y']] + if len(year) == 2: + year = 2000 + int(year) + else: + year = int(year) + month = int(numbers[indexes['M']]) + day = int(numbers[indexes['D']]) + if month > 12: + month, day = day, month + return date(year, month, day) + + +def parse_time(string, locale=LC_TIME): + """Parse a time from a string. + + This function uses the time format for the locale as a hint to determine + the order in which the time fields appear in the string. + + >>> parse_time('15:30:00', locale='en_US') + datetime.time(15, 30) + + :param string: the string containing the time + :param locale: a `Locale` object or a locale identifier + :return: the parsed time + :rtype: `time` + """ + # TODO: try ISO format first? 
+ format = get_time_format(locale=locale).pattern.lower() + hour_idx = format.index('h') + if hour_idx < 0: + hour_idx = format.index('k') + min_idx = format.index('m') + sec_idx = format.index('s') + + indexes = [(hour_idx, 'H'), (min_idx, 'M'), (sec_idx, 'S')] + indexes.sort() + indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)]) + + # FIXME: support 12 hour clock, and 0-based hour specification + # and seconds should be optional, maybe minutes too + # oh, and time-zones, of course + + numbers = re.findall(r'(\d+)', string) + hour = int(numbers[indexes['H']]) + minute = int(numbers[indexes['M']]) + second = int(numbers[indexes['S']]) + return time(hour, minute, second) + + +class DateTimePattern(object): + + def __init__(self, pattern, format): + self.pattern = pattern + self.format = format + + def __repr__(self): + return '<%s %r>' % (type(self).__name__, self.pattern) + + def __unicode__(self): + return self.pattern + + def __str__(self): + pat = self.pattern + if PY2: + pat = pat.encode('utf-8') + return pat + + def __mod__(self, other): + if type(other) is not DateTimeFormat: + return NotImplemented + return self.format % other + + def apply(self, datetime, locale): + return self % DateTimeFormat(datetime, locale) + + +class DateTimeFormat(object): + + def __init__(self, value, locale): + assert isinstance(value, (date, datetime, time)) + if isinstance(value, (datetime, time)) and value.tzinfo is None: + value = value.replace(tzinfo=UTC) + self.value = value + self.locale = Locale.parse(locale) + + def __getitem__(self, name): + char = name[0] + num = len(name) + if char == 'G': + return self.format_era(char, num) + elif char in ('y', 'Y', 'u'): + return self.format_year(char, num) + elif char in ('Q', 'q'): + return self.format_quarter(char, num) + elif char in ('M', 'L'): + return self.format_month(char, num) + elif char in ('w', 'W'): + return self.format_week(char, num) + elif char == 'd': + return self.format(self.value.day, num) + elif char == 'D': + return self.format_day_of_year(num) + elif char == 'F': + return self.format_day_of_week_in_month() + elif char in ('E', 'e', 'c'): + return self.format_weekday(char, num) + elif char == 'a': + # TODO: Add support for the rest of the period formats (a*, b*, B*) + return self.format_period(char) + elif char == 'h': + if self.value.hour % 12 == 0: + return self.format(12, num) + else: + return self.format(self.value.hour % 12, num) + elif char == 'H': + return self.format(self.value.hour, num) + elif char == 'K': + return self.format(self.value.hour % 12, num) + elif char == 'k': + if self.value.hour == 0: + return self.format(24, num) + else: + return self.format(self.value.hour, num) + elif char == 'm': + return self.format(self.value.minute, num) + elif char == 's': + return self.format(self.value.second, num) + elif char == 'S': + return self.format_frac_seconds(num) + elif char == 'A': + return self.format_milliseconds_in_day(num) + elif char in ('z', 'Z', 'v', 'V', 'x', 'X', 'O'): + return self.format_timezone(char, num) + else: + raise KeyError('Unsupported date/time field %r' % char) + + def extract(self, char): + char = str(char)[0] + if char == 'y': + return self.value.year + elif char == 'M': + return self.value.month + elif char == 'd': + return self.value.day + elif char == 'H': + return self.value.hour + elif char == 'h': + return self.value.hour % 12 or 12 + elif char == 'm': + return self.value.minute + elif char == 'a': + return int(self.value.hour >= 12) # 0 for am, 1 for pm + else: + raise 
NotImplementedError("Not implemented: extracting %r from %r" % (char, self.value)) + + def format_era(self, char, num): + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[max(3, num)] + era = int(self.value.year >= 0) + return get_era_names(width, self.locale)[era] + + def format_year(self, char, num): + value = self.value.year + if char.isupper(): + value = self.value.isocalendar()[0] + year = self.format(value, num) + if num == 2: + year = year[-2:] + return year + + def format_quarter(self, char, num): + quarter = (self.value.month - 1) // 3 + 1 + if num <= 2: + return '%0*d' % (num, quarter) + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num] + context = {'Q': 'format', 'q': 'stand-alone'}[char] + return get_quarter_names(width, context, self.locale)[quarter] + + def format_month(self, char, num): + if num <= 2: + return '%0*d' % (num, self.value.month) + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num] + context = {'M': 'format', 'L': 'stand-alone'}[char] + return get_month_names(width, context, self.locale)[self.value.month] + + def format_week(self, char, num): + if char.islower(): # week of year + day_of_year = self.get_day_of_year() + week = self.get_week_number(day_of_year) + if week == 0: + date = self.value - timedelta(days=day_of_year) + week = self.get_week_number(self.get_day_of_year(date), + date.weekday()) + return self.format(week, num) + else: # week of month + week = self.get_week_number(self.value.day) + if week == 0: + date = self.value - timedelta(days=self.value.day) + week = self.get_week_number(date.day, date.weekday()) + return '%d' % week + + def format_weekday(self, char='E', num=4): + """ + Return weekday from parsed datetime according to format pattern. + + >>> format = DateTimeFormat(date(2016, 2, 28), Locale.parse('en_US')) + >>> format.format_weekday() + u'Sunday' + + 'E': Day of week - Use one through three letters for the abbreviated day name, four for the full (wide) name, + five for the narrow name, or six for the short name. + >>> format.format_weekday('E',2) + u'Sun' + + 'e': Local day of week. Same as E except adds a numeric value that will depend on the local starting day of the + week, using one or two letters. For this example, Monday is the first day of the week. + >>> format.format_weekday('e',2) + '01' + + 'c': Stand-Alone local day of week - Use one letter for the local numeric value (same as 'e'), three for the + abbreviated day name, four for the full (wide) name, five for the narrow name, or six for the short name. 
+ >>> format.format_weekday('c',1) + '1' + + :param char: pattern format character ('e','E','c') + :param num: count of format character + + """ + if num < 3: + if char.islower(): + value = 7 - self.locale.first_week_day + self.value.weekday() + return self.format(value % 7 + 1, num) + num = 3 + weekday = self.value.weekday() + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow', 6: 'short'}[num] + if char == 'c': + context = 'stand-alone' + else: + context = 'format' + return get_day_names(width, context, self.locale)[weekday] + + def format_day_of_year(self, num): + return self.format(self.get_day_of_year(), num) + + def format_day_of_week_in_month(self): + return '%d' % ((self.value.day - 1) // 7 + 1) + + def format_period(self, char): + period = {0: 'am', 1: 'pm'}[int(self.value.hour >= 12)] + for width in ('wide', 'narrow', 'abbreviated'): + period_names = get_period_names(context='format', width=width, locale=self.locale) + if period in period_names: + return period_names[period] + raise ValueError('Could not format period %s in %s' % (period, self.locale)) + + def format_frac_seconds(self, num): + """ Return fractional seconds. + + Rounds the time's microseconds to the precision given by the number \ + of digits passed in. + """ + value = self.value.microsecond / 1000000 + return self.format(round(value, num) * 10**num, num) + + def format_milliseconds_in_day(self, num): + msecs = self.value.microsecond // 1000 + self.value.second * 1000 + \ + self.value.minute * 60000 + self.value.hour * 3600000 + return self.format(msecs, num) + + def format_timezone(self, char, num): + width = {3: 'short', 4: 'long', 5: 'iso8601'}[max(3, num)] + if char == 'z': + return get_timezone_name(self.value, width, locale=self.locale) + elif char == 'Z': + if num == 5: + return get_timezone_gmt(self.value, width, locale=self.locale, return_z=True) + return get_timezone_gmt(self.value, width, locale=self.locale) + elif char == 'O': + if num == 4: + return get_timezone_gmt(self.value, width, locale=self.locale) + # TODO: To add support for O:1 + elif char == 'v': + return get_timezone_name(self.value.tzinfo, width, + locale=self.locale) + elif char == 'V': + if num == 1: + return get_timezone_name(self.value.tzinfo, width, + uncommon=True, locale=self.locale) + elif num == 2: + return get_timezone_name(self.value.tzinfo, locale=self.locale, return_zone=True) + elif num == 3: + return get_timezone_location(self.value.tzinfo, locale=self.locale, return_city=True) + return get_timezone_location(self.value.tzinfo, locale=self.locale) + # Included additional elif condition to add support for 'Xx' in timezone format + elif char == 'X': + if num == 1: + return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale, + return_z=True) + elif num in (2, 4): + return get_timezone_gmt(self.value, width='short', locale=self.locale, + return_z=True) + elif num in (3, 5): + return get_timezone_gmt(self.value, width='iso8601', locale=self.locale, + return_z=True) + elif char == 'x': + if num == 1: + return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale) + elif num in (2, 4): + return get_timezone_gmt(self.value, width='short', locale=self.locale) + elif num in (3, 5): + return get_timezone_gmt(self.value, width='iso8601', locale=self.locale) + + def format(self, value, length): + return '%0*d' % (length, value) + + def get_day_of_year(self, date=None): + if date is None: + date = self.value + return (date - date.replace(month=1, day=1)).days + 1 + + def get_week_number(self, 
day_of_period, day_of_week=None): + """Return the number of the week of a day within a period. This may be + the week number in a year or the week number in a month. + + Usually this will return a value equal to or greater than 1, but if the + first week of the period is so short that it actually counts as the last + week of the previous period, this function will return 0. + + >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('de_DE')) + >>> format.get_week_number(6) + 1 + + >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('en_US')) + >>> format.get_week_number(6) + 2 + + :param day_of_period: the number of the day in the period (usually + either the day of month or the day of year) + :param day_of_week: the week day; if omitted, the week day of the + current date is assumed + """ + if day_of_week is None: + day_of_week = self.value.weekday() + first_day = (day_of_week - self.locale.first_week_day - + day_of_period + 1) % 7 + if first_day < 0: + first_day += 7 + week_number = (day_of_period + first_day - 1) // 7 + + if 7 - first_day >= self.locale.min_week_days: + week_number += 1 + + if self.locale.first_week_day == 0: + # Correct the weeknumber in case of iso-calendar usage (first_week_day=0). + # If the weeknumber exceeds the maximum number of weeks for the given year + # we must count from zero. For example the above calculation gives week 53 + # for 2018-12-31. By iso-calendar definition 2018 has a max of 52 + # weeks, thus the weeknumber must be 53-52=1. + max_weeks = date(year=self.value.year, day=28, month=12).isocalendar()[1] + if week_number > max_weeks: + week_number -= max_weeks + + return week_number + + +PATTERN_CHARS = { + 'G': [1, 2, 3, 4, 5], # era + 'y': None, 'Y': None, 'u': None, # year + 'Q': [1, 2, 3, 4, 5], 'q': [1, 2, 3, 4, 5], # quarter + 'M': [1, 2, 3, 4, 5], 'L': [1, 2, 3, 4, 5], # month + 'w': [1, 2], 'W': [1], # week + 'd': [1, 2], 'D': [1, 2, 3], 'F': [1], 'g': None, # day + 'E': [1, 2, 3, 4, 5, 6], 'e': [1, 2, 3, 4, 5, 6], 'c': [1, 3, 4, 5, 6], # week day + 'a': [1], # period + 'h': [1, 2], 'H': [1, 2], 'K': [1, 2], 'k': [1, 2], # hour + 'm': [1, 2], # minute + 's': [1, 2], 'S': None, 'A': None, # second + 'z': [1, 2, 3, 4], 'Z': [1, 2, 3, 4, 5], 'O': [1, 4], 'v': [1, 4], # zone + 'V': [1, 2, 3, 4], 'x': [1, 2, 3, 4, 5], 'X': [1, 2, 3, 4, 5] # zone +} + +#: The pattern characters declared in the Date Field Symbol Table +#: (https://www.unicode.org/reports/tr35/tr35-dates.html#Date_Field_Symbol_Table) +#: in order of decreasing magnitude. +PATTERN_CHAR_ORDER = "GyYuUQqMLlwWdDFgEecabBChHKkjJmsSAzZOvVXx" + +_pattern_cache = {} + + +def parse_pattern(pattern): + """Parse date, time, and datetime format patterns.
+ + >>> parse_pattern("MMMMd").format + u'%(MMMM)s%(d)s' + >>> parse_pattern("MMM d, yyyy").format + u'%(MMM)s %(d)s, %(yyyy)s' + + Pattern can contain literal strings in single quotes: + + >>> parse_pattern("H:mm' Uhr 'z").format + u'%(H)s:%(mm)s Uhr %(z)s' + + An actual single quote can be used by using two adjacent single quote + characters: + + >>> parse_pattern("hh' o''clock'").format + u"%(hh)s o'clock" + + :param pattern: the formatting pattern to parse + """ + if type(pattern) is DateTimePattern: + return pattern + + if pattern in _pattern_cache: + return _pattern_cache[pattern] + + result = [] + + for tok_type, tok_value in tokenize_pattern(pattern): + if tok_type == "chars": + result.append(tok_value.replace('%', '%%')) + elif tok_type == "field": + fieldchar, fieldnum = tok_value + limit = PATTERN_CHARS[fieldchar] + if limit and fieldnum not in limit: + raise ValueError('Invalid length for field: %r' + % (fieldchar * fieldnum)) + result.append('%%(%s)s' % (fieldchar * fieldnum)) + else: + raise NotImplementedError("Unknown token type: %s" % tok_type) + + _pattern_cache[pattern] = pat = DateTimePattern(pattern, u''.join(result)) + return pat + + +def tokenize_pattern(pattern): + """ + Tokenize date format patterns. + + Returns a list of (token_type, token_value) tuples. + + ``token_type`` may be either "chars" or "field". + + For "chars" tokens, the value is the literal value. + + For "field" tokens, the value is a tuple of (field character, repetition count). + + :param pattern: Pattern string + :type pattern: str + :rtype: list[tuple] + """ + result = [] + quotebuf = None + charbuf = [] + fieldchar = [''] + fieldnum = [0] + + def append_chars(): + result.append(('chars', ''.join(charbuf).replace('\0', "'"))) + del charbuf[:] + + def append_field(): + result.append(('field', (fieldchar[0], fieldnum[0]))) + fieldchar[0] = '' + fieldnum[0] = 0 + + for idx, char in enumerate(pattern.replace("''", '\0')): + if quotebuf is None: + if char == "'": # quote started + if fieldchar[0]: + append_field() + elif charbuf: + append_chars() + quotebuf = [] + elif char in PATTERN_CHARS: + if charbuf: + append_chars() + if char == fieldchar[0]: + fieldnum[0] += 1 + else: + if fieldchar[0]: + append_field() + fieldchar[0] = char + fieldnum[0] = 1 + else: + if fieldchar[0]: + append_field() + charbuf.append(char) + + elif quotebuf is not None: + if char == "'": # end of quote + charbuf.extend(quotebuf) + quotebuf = None + else: # inside quote + quotebuf.append(char) + + if fieldchar[0]: + append_field() + elif charbuf: + append_chars() + + return result + + +def untokenize_pattern(tokens): + """ + Turn a date format pattern token stream back into a string. + + This is the reverse operation of ``tokenize_pattern``. + + :type tokens: Iterable[tuple] + :rtype: str + """ + output = [] + for tok_type, tok_value in tokens: + if tok_type == "field": + output.append(tok_value[0] * tok_value[1]) + elif tok_type == "chars": + if not any(ch in PATTERN_CHARS for ch in tok_value): # No need to quote + output.append(tok_value) + else: + output.append("'%s'" % tok_value.replace("'", "''")) + return "".join(output) + + +def split_interval_pattern(pattern): + """ + Split an interval-describing datetime pattern into multiple pieces. + + > The pattern is then designed to be broken up into two pieces by determining the first repeating field. + - https://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats + + >>> split_interval_pattern(u'E d.M. \u2013 E d.M.') + [u'E d.M. 
\u2013 ', 'E d.M.'] + >>> split_interval_pattern("Y 'text' Y 'more text'") + ["Y 'text '", "Y 'more text'"] + >>> split_interval_pattern(u"E, MMM d \u2013 E") + [u'E, MMM d \u2013 ', u'E'] + >>> split_interval_pattern("MMM d") + ['MMM d'] + >>> split_interval_pattern("y G") + ['y G'] + >>> split_interval_pattern(u"MMM d \u2013 d") + [u'MMM d \u2013 ', u'd'] + + :param pattern: Interval pattern string + :return: list of "subpatterns" + """ + + seen_fields = set() + parts = [[]] + + for tok_type, tok_value in tokenize_pattern(pattern): + if tok_type == "field": + if tok_value[0] in seen_fields: # Repeated field + parts.append([]) + seen_fields.clear() + seen_fields.add(tok_value[0]) + parts[-1].append((tok_type, tok_value)) + + return [untokenize_pattern(tokens) for tokens in parts] + + +def match_skeleton(skeleton, options, allow_different_fields=False): + """ + Find the closest match for the given datetime skeleton among the options given. + + This uses the rules outlined in the TR35 document. + + >>> match_skeleton('yMMd', ('yMd', 'yMMMd')) + 'yMd' + + >>> match_skeleton('yMMd', ('jyMMd',), allow_different_fields=True) + 'jyMMd' + + >>> match_skeleton('yMMd', ('qyMMd',), allow_different_fields=False) + + >>> match_skeleton('hmz', ('hmv',)) + 'hmv' + + :param skeleton: The skeleton to match + :type skeleton: str + :param options: An iterable of other skeletons to match against + :type options: Iterable[str] + :return: The closest skeleton match, or if no match was found, None. + :rtype: str|None + """ + + # TODO: maybe implement pattern expansion? + + # Based on the implementation in + # http://source.icu-project.org/repos/icu/icu4j/trunk/main/classes/core/src/com/ibm/icu/text/DateIntervalInfo.java + + # Filter out falsy values and sort for stability; when `interval_formats` is passed in, there may be a None key. + options = sorted(option for option in options if option) + + if 'z' in skeleton and not any('z' in option for option in options): + skeleton = skeleton.replace('z', 'v') + + get_input_field_width = dict(t[1] for t in tokenize_pattern(skeleton) if t[0] == "field").get + best_skeleton = None + best_distance = None + for option in options: + get_opt_field_width = dict(t[1] for t in tokenize_pattern(option) if t[0] == "field").get + distance = 0 + for field in PATTERN_CHARS: + input_width = get_input_field_width(field, 0) + opt_width = get_opt_field_width(field, 0) + if input_width == opt_width: + continue + if opt_width == 0 or input_width == 0: + if not allow_different_fields: # This one is not okay + option = None + break + distance += 0x1000 # Magic weight constant for "entirely different fields" + elif field == 'M' and ((input_width > 2 and opt_width <= 2) or (input_width <= 2 and opt_width > 2)): + distance += 0x100 # Magic weight for "text turns into a number" + else: + distance += abs(input_width - opt_width) + + if not option: # We lost the option along the way (probably due to "allow_different_fields") + continue + + if not best_skeleton or distance < best_distance: + best_skeleton = option + best_distance = distance + + if distance == 0: # Found a perfect match! 
+ break + + return best_skeleton diff --git a/venv/lib/python3.7/site-packages/babel/global.dat b/venv/lib/python3.7/site-packages/babel/global.dat new file mode 100644 index 0000000..0aacd0c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/global.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/languages.py b/venv/lib/python3.7/site-packages/babel/languages.py new file mode 100644 index 0000000..0974367 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/languages.py @@ -0,0 +1,71 @@ +# -- encoding: UTF-8 -- +from babel.core import get_global + + +def get_official_languages(territory, regional=False, de_facto=False): + """ + Get the official language(s) for the given territory. + + The language codes, if any are known, are returned in order of descending popularity. + + If the `regional` flag is set, then languages which are regionally official are also returned. + + If the `de_facto` flag is set, then languages which are "de facto" official are also returned. + + .. warning:: Note that the data is as up to date as the current version of the CLDR used + by Babel. If you need scientifically accurate information, use another source! + + :param territory: Territory code + :type territory: str + :param regional: Whether to return regionally official languages too + :type regional: bool + :param de_facto: Whether to return de-facto official languages too + :type de_facto: bool + :return: Tuple of language codes + :rtype: tuple[str] + """ + + territory = str(territory).upper() + allowed_stati = {"official"} + if regional: + allowed_stati.add("official_regional") + if de_facto: + allowed_stati.add("de_facto_official") + + languages = get_global("territory_languages").get(territory, {}) + pairs = [ + (info['population_percent'], language) + for language, info in languages.items() + if info.get('official_status') in allowed_stati + ] + pairs.sort(reverse=True) + return tuple(lang for _, lang in pairs) + + +def get_territory_language_info(territory): + """ + Get a dictionary of language information for a territory. + + The dictionary is keyed by language code; the values are dicts with more information. + + The following keys are currently known for the values: + + * `population_percent`: The percentage of the territory's population speaking the + language. + * `official_status`: An optional string describing the officiality status of the language. + Known values are "official", "official_regional" and "de_facto_official". + + .. warning:: Note that the data is as up to date as the current version of the CLDR used + by Babel. If you need scientifically accurate information, use another source! + + .. note:: Note that the format of the dict returned may change between Babel versions. + + See https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html + + :param territory: Territory code + :type territory: str + :return: Language information dictionary + :rtype: dict[str, dict] + """ + territory = str(territory).upper() + return get_global("territory_languages").get(territory, {}).copy() diff --git a/venv/lib/python3.7/site-packages/babel/lists.py b/venv/lib/python3.7/site-packages/babel/lists.py new file mode 100644 index 0000000..ab5a24c --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/lists.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + babel.lists + ~~~~~~~~~~~ + + Locale dependent formatting of lists. 
+ + The default locale for the functions in this module is determined by the + following environment variables, in that order: + + * ``LC_ALL``, and + * ``LANG`` + + :copyright: (c) 2015-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.core import Locale, default_locale + +DEFAULT_LOCALE = default_locale() + + +def format_list(lst, style='standard', locale=DEFAULT_LOCALE): + """ + Format the items in `lst` as a list. + + >>> format_list(['apples', 'oranges', 'pears'], locale='en') + u'apples, oranges, and pears' + >>> format_list(['apples', 'oranges', 'pears'], locale='zh') + u'apples\u3001oranges\u548cpears' + >>> format_list(['omena', 'peruna', 'aplari'], style='or', locale='fi') + u'omena, peruna tai aplari' + + These styles are defined, but not all are necessarily available in all locales. + The following text is verbatim from the Unicode TR35-49 spec [1]. + + * standard: + A typical 'and' list for arbitrary placeholders. + eg. "January, February, and March" + * standard-short: + A short version of a 'and' list, suitable for use with short or abbreviated placeholder values. + eg. "Jan., Feb., and Mar." + * or: + A typical 'or' list for arbitrary placeholders. + eg. "January, February, or March" + * or-short: + A short version of an 'or' list. + eg. "Jan., Feb., or Mar." + * unit: + A list suitable for wide units. + eg. "3 feet, 7 inches" + * unit-short: + A list suitable for short units + eg. "3 ft, 7 in" + * unit-narrow: + A list suitable for narrow units, where space on the screen is very limited. + eg. "3′ 7″" + + [1]: https://www.unicode.org/reports/tr35/tr35-49/tr35-general.html#ListPatterns + + :param lst: a sequence of items to format into a list + :param style: the style to format the list with. See above for description.
+ :param locale: the locale + """ + locale = Locale.parse(locale) + if not lst: + return '' + if len(lst) == 1: + return lst[0] + + if style not in locale.list_patterns: + raise ValueError('Locale %s does not support list formatting style %r (supported are %s)' % ( + locale, + style, + list(sorted(locale.list_patterns)), + )) + patterns = locale.list_patterns[style] + + if len(lst) == 2: + return patterns['2'].format(*lst) + + result = patterns['start'].format(lst[0], lst[1]) + for elem in lst[2:-1]: + result = patterns['middle'].format(result, elem) + result = patterns['end'].format(result, lst[-1]) + + return result diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/af.dat b/venv/lib/python3.7/site-packages/babel/locale-data/af.dat new file mode 100644 index 0000000..0eb67c0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/af.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/af_NA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/af_NA.dat new file mode 100644 index 0000000..45eb700 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/af_NA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/af_ZA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/af_ZA.dat new file mode 100644 index 0000000..bcaba27 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/af_ZA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/agq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/agq.dat new file mode 100644 index 0000000..24d86c6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/agq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/agq_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/agq_CM.dat new file mode 100644 index 0000000..8a4fb33 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/agq_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ak.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ak.dat new file mode 100644 index 0000000..eff91f6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ak.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ak_GH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ak_GH.dat new file mode 100644 index 0000000..32ad02e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ak_GH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/am.dat b/venv/lib/python3.7/site-packages/babel/locale-data/am.dat new file mode 100644 index 0000000..57ebfb5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/am.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/am_ET.dat b/venv/lib/python3.7/site-packages/babel/locale-data/am_ET.dat new file mode 100644 index 0000000..5547b3f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/am_ET.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar.dat new file mode 100644 index 0000000..1c043c6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_001.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_001.dat new file mode 100644 index 0000000..06d8d9a 
Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_001.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_AE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_AE.dat new file mode 100644 index 0000000..ec26fce Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_AE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_BH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_BH.dat new file mode 100644 index 0000000..260feeb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_BH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_DJ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_DJ.dat new file mode 100644 index 0000000..b88b543 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_DJ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_DZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_DZ.dat new file mode 100644 index 0000000..ec9f484 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_DZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_EG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_EG.dat new file mode 100644 index 0000000..e014345 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_EG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_EH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_EH.dat new file mode 100644 index 0000000..b437ea5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_EH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_ER.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_ER.dat new file mode 100644 index 0000000..4501c04 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_ER.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_IL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_IL.dat new file mode 100644 index 0000000..b47223f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_IL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_IQ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_IQ.dat new file mode 100644 index 0000000..9b74704 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_IQ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_JO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_JO.dat new file mode 100644 index 0000000..50d6283 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_JO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_KM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_KM.dat new file mode 100644 index 0000000..5208248 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_KM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_KW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_KW.dat new file mode 100644 index 0000000..8acdabb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_KW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_LB.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/ar_LB.dat new file mode 100644 index 0000000..01253c0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_LB.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_LY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_LY.dat new file mode 100644 index 0000000..9b51122 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_LY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_MA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_MA.dat new file mode 100644 index 0000000..7980e3a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_MA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_MR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_MR.dat new file mode 100644 index 0000000..d6d57a7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_MR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_OM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_OM.dat new file mode 100644 index 0000000..7082022 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_OM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_PS.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_PS.dat new file mode 100644 index 0000000..0f2f4a1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_PS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_QA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_QA.dat new file mode 100644 index 0000000..6695b65 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_QA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_SA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SA.dat new file mode 100644 index 0000000..11dc6cb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_SD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SD.dat new file mode 100644 index 0000000..d9d68c6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_SO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SO.dat new file mode 100644 index 0000000..984a8d2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_SS.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SS.dat new file mode 100644 index 0000000..03ef013 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_SY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SY.dat new file mode 100644 index 0000000..9864dcd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_SY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_TD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_TD.dat new file mode 100644 index 0000000..ef80380 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_TD.dat differ diff --git 
a/venv/lib/python3.7/site-packages/babel/locale-data/ar_TN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_TN.dat new file mode 100644 index 0000000..d2b12cf Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_TN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ar_YE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ar_YE.dat new file mode 100644 index 0000000..6c7115e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ar_YE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/as.dat b/venv/lib/python3.7/site-packages/babel/locale-data/as.dat new file mode 100644 index 0000000..d1edaef Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/as.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/as_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/as_IN.dat new file mode 100644 index 0000000..160978d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/as_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/asa.dat b/venv/lib/python3.7/site-packages/babel/locale-data/asa.dat new file mode 100644 index 0000000..e165a46 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/asa.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/asa_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/asa_TZ.dat new file mode 100644 index 0000000..bf2654b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/asa_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ast.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ast.dat new file mode 100644 index 0000000..f630964 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ast.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ast_ES.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ast_ES.dat new file mode 100644 index 0000000..4172567 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ast_ES.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/az.dat b/venv/lib/python3.7/site-packages/babel/locale-data/az.dat new file mode 100644 index 0000000..d11610c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/az.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl.dat new file mode 100644 index 0000000..038294c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl_AZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl_AZ.dat new file mode 100644 index 0000000..29b119f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/az_Cyrl_AZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn.dat new file mode 100644 index 0000000..8ab69ea Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn_AZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn_AZ.dat new file mode 100644 index 0000000..c052389 Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/az_Latn_AZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bas.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bas.dat new file mode 100644 index 0000000..0054afe Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bas.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bas_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bas_CM.dat new file mode 100644 index 0000000..fd79663 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bas_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/be.dat b/venv/lib/python3.7/site-packages/babel/locale-data/be.dat new file mode 100644 index 0000000..b4435ee Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/be.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/be_BY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/be_BY.dat new file mode 100644 index 0000000..e5d4f1a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/be_BY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bem.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bem.dat new file mode 100644 index 0000000..695bde0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bem.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bem_ZM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bem_ZM.dat new file mode 100644 index 0000000..ca2a5e0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bem_ZM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bez.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bez.dat new file mode 100644 index 0000000..13ebb7c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bez.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bez_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bez_TZ.dat new file mode 100644 index 0000000..0a7a38b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bez_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bg.dat new file mode 100644 index 0000000..bf15bb3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bg_BG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bg_BG.dat new file mode 100644 index 0000000..393e3fe Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bg_BG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bm.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bm.dat new file mode 100644 index 0000000..70a9bf8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bm.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bm_ML.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bm_ML.dat new file mode 100644 index 0000000..911b47d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bm_ML.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bn.dat new file mode 100644 index 0000000..eed644a 
Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bn_BD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bn_BD.dat new file mode 100644 index 0000000..47cfcc4 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bn_BD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bn_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bn_IN.dat new file mode 100644 index 0000000..c60196a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bn_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bo.dat new file mode 100644 index 0000000..029e676 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bo_CN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bo_CN.dat new file mode 100644 index 0000000..3d8dc23 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bo_CN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bo_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bo_IN.dat new file mode 100644 index 0000000..83c1459 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bo_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/br.dat b/venv/lib/python3.7/site-packages/babel/locale-data/br.dat new file mode 100644 index 0000000..5fbc18e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/br.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/br_FR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/br_FR.dat new file mode 100644 index 0000000..5ad6ff3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/br_FR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/brx.dat b/venv/lib/python3.7/site-packages/babel/locale-data/brx.dat new file mode 100644 index 0000000..d5dcde9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/brx.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/brx_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/brx_IN.dat new file mode 100644 index 0000000..323f024 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/brx_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bs.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bs.dat new file mode 100644 index 0000000..62eefa8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bs.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl.dat new file mode 100644 index 0000000..666a315 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl_BA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl_BA.dat new file mode 100644 index 0000000..edcd425 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Cyrl_BA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn.dat new file mode 100644 index 0000000..f3bec0b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn_BA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn_BA.dat new file mode 100644 index 0000000..6526e91 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/bs_Latn_BA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca.dat new file mode 100644 index 0000000..703aad3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca_AD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca_AD.dat new file mode 100644 index 0000000..1d18b5d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca_AD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES.dat new file mode 100644 index 0000000..b9c9d64 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES_VALENCIA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES_VALENCIA.dat new file mode 100644 index 0000000..cd8c58a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca_ES_VALENCIA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca_FR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca_FR.dat new file mode 100644 index 0000000..aff8bdd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca_FR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ca_IT.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ca_IT.dat new file mode 100644 index 0000000..c91a107 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ca_IT.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ccp.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ccp.dat new file mode 100644 index 0000000..93bb577 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ccp.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ccp_BD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ccp_BD.dat new file mode 100644 index 0000000..efa33cb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ccp_BD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ccp_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ccp_IN.dat new file mode 100644 index 0000000..20a79ba Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ccp_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ce.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ce.dat new file mode 100644 index 0000000..71ba69c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ce.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ce_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ce_RU.dat new file mode 100644 index 0000000..c0bebf7 Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/ce_RU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ceb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ceb.dat new file mode 100644 index 0000000..580874c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ceb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ceb_PH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ceb_PH.dat new file mode 100644 index 0000000..7adeb3e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ceb_PH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cgg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cgg.dat new file mode 100644 index 0000000..907ac76 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/cgg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cgg_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cgg_UG.dat new file mode 100644 index 0000000..e77c0f1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/cgg_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/chr.dat b/venv/lib/python3.7/site-packages/babel/locale-data/chr.dat new file mode 100644 index 0000000..38bd9ff Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/chr.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/chr_US.dat b/venv/lib/python3.7/site-packages/babel/locale-data/chr_US.dat new file mode 100644 index 0000000..46d00bd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/chr_US.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ckb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ckb.dat new file mode 100644 index 0000000..5e84bb1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ckb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IQ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IQ.dat new file mode 100644 index 0000000..42e6831 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IQ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IR.dat new file mode 100644 index 0000000..1762f65 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ckb_IR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cs.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cs.dat new file mode 100644 index 0000000..60ae66e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/cs.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cs_CZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cs_CZ.dat new file mode 100644 index 0000000..9fecf77 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/cs_CZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cu.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cu.dat new file mode 100644 index 0000000..8b2586b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/cu.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/cu_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/cu_RU.dat new file mode 100644 index 
[... several hundred binary diff entries omitted: venv/lib/python3.7/site-packages/babel/locale-data/*.dat (cu_RU.dat through gv.dat), all newly added because the virtualenv itself was committed; the binary locale-data blobs carry no reviewable content ...]
Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/gv.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/gv_IM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/gv_IM.dat new file mode 100644 index 0000000..306a5ac Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/gv_IM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ha.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ha.dat new file mode 100644 index 0000000..0cb81ad Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ha.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ha_GH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ha_GH.dat new file mode 100644 index 0000000..f811720 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ha_GH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ha_NE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ha_NE.dat new file mode 100644 index 0000000..a5df628 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ha_NE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ha_NG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ha_NG.dat new file mode 100644 index 0000000..2df3466 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ha_NG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/haw.dat b/venv/lib/python3.7/site-packages/babel/locale-data/haw.dat new file mode 100644 index 0000000..08064a9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/haw.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/haw_US.dat b/venv/lib/python3.7/site-packages/babel/locale-data/haw_US.dat new file mode 100644 index 0000000..0431b61 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/haw_US.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/he.dat b/venv/lib/python3.7/site-packages/babel/locale-data/he.dat new file mode 100644 index 0000000..25aaebd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/he.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/he_IL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/he_IL.dat new file mode 100644 index 0000000..a35b0aa Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/he_IL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hi.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hi.dat new file mode 100644 index 0000000..dc5d40d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hi.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hi_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hi_IN.dat new file mode 100644 index 0000000..d3db5bc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hi_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hr.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hr.dat new file mode 100644 index 0000000..ae45629 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hr.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hr_BA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hr_BA.dat new file mode 100644 index 
0000000..97652f3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hr_BA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hr_HR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hr_HR.dat new file mode 100644 index 0000000..6da5b24 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hr_HR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hsb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hsb.dat new file mode 100644 index 0000000..a2b39e0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hsb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hsb_DE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hsb_DE.dat new file mode 100644 index 0000000..2198923 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hsb_DE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hu.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hu.dat new file mode 100644 index 0000000..64e8ada Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hu.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hu_HU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hu_HU.dat new file mode 100644 index 0000000..6eb3be1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hu_HU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hy.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hy.dat new file mode 100644 index 0000000..5f35ea9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hy.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/hy_AM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/hy_AM.dat new file mode 100644 index 0000000..edfdbdf Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/hy_AM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ia.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ia.dat new file mode 100644 index 0000000..606198e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ia.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ia_001.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ia_001.dat new file mode 100644 index 0000000..58ffad5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ia_001.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/id.dat b/venv/lib/python3.7/site-packages/babel/locale-data/id.dat new file mode 100644 index 0000000..e0414a7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/id.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/id_ID.dat b/venv/lib/python3.7/site-packages/babel/locale-data/id_ID.dat new file mode 100644 index 0000000..0341a16 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/id_ID.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ig.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ig.dat new file mode 100644 index 0000000..8cfcfc2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ig.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ig_NG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ig_NG.dat new file 
mode 100644 index 0000000..a8f2e1e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ig_NG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ii.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ii.dat new file mode 100644 index 0000000..7dcbadc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ii.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ii_CN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ii_CN.dat new file mode 100644 index 0000000..e36c92f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ii_CN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/is.dat b/venv/lib/python3.7/site-packages/babel/locale-data/is.dat new file mode 100644 index 0000000..79c813a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/is.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/is_IS.dat b/venv/lib/python3.7/site-packages/babel/locale-data/is_IS.dat new file mode 100644 index 0000000..d67238a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/is_IS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/it.dat b/venv/lib/python3.7/site-packages/babel/locale-data/it.dat new file mode 100644 index 0000000..c1f9b3a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/it.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/it_CH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/it_CH.dat new file mode 100644 index 0000000..c1420c8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/it_CH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/it_IT.dat b/venv/lib/python3.7/site-packages/babel/locale-data/it_IT.dat new file mode 100644 index 0000000..14bd7ba Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/it_IT.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/it_SM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/it_SM.dat new file mode 100644 index 0000000..05aeb4a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/it_SM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/it_VA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/it_VA.dat new file mode 100644 index 0000000..a2eddcd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/it_VA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ja.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ja.dat new file mode 100644 index 0000000..85badc8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ja.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ja_JP.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ja_JP.dat new file mode 100644 index 0000000..cdc05d9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ja_JP.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jgo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/jgo.dat new file mode 100644 index 0000000..95db38d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jgo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jgo_CM.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/jgo_CM.dat new file mode 100644 index 0000000..ae2e49c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jgo_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jmc.dat b/venv/lib/python3.7/site-packages/babel/locale-data/jmc.dat new file mode 100644 index 0000000..76671bc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jmc.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jmc_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/jmc_TZ.dat new file mode 100644 index 0000000..d928798 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jmc_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jv.dat b/venv/lib/python3.7/site-packages/babel/locale-data/jv.dat new file mode 100644 index 0000000..71a9b98 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jv.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/jv_ID.dat b/venv/lib/python3.7/site-packages/babel/locale-data/jv_ID.dat new file mode 100644 index 0000000..047a0e6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/jv_ID.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ka.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ka.dat new file mode 100644 index 0000000..1257781 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ka.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ka_GE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ka_GE.dat new file mode 100644 index 0000000..5c22f8d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ka_GE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kab.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kab.dat new file mode 100644 index 0000000..9f47600 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kab.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kab_DZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kab_DZ.dat new file mode 100644 index 0000000..e2b2897 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kab_DZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kam.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kam.dat new file mode 100644 index 0000000..b9cd452 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kam.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kam_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kam_KE.dat new file mode 100644 index 0000000..1943086 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kam_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kde.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kde.dat new file mode 100644 index 0000000..3ecb1ca Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kde.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kde_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kde_TZ.dat new file mode 100644 index 0000000..d846f2f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kde_TZ.dat differ diff --git 
a/venv/lib/python3.7/site-packages/babel/locale-data/kea.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kea.dat new file mode 100644 index 0000000..838bcd1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kea.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kea_CV.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kea_CV.dat new file mode 100644 index 0000000..555a0c6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kea_CV.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/khq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/khq.dat new file mode 100644 index 0000000..15a66eb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/khq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/khq_ML.dat b/venv/lib/python3.7/site-packages/babel/locale-data/khq_ML.dat new file mode 100644 index 0000000..148aabd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/khq_ML.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ki.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ki.dat new file mode 100644 index 0000000..77ebee9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ki.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ki_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ki_KE.dat new file mode 100644 index 0000000..dea0308 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ki_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kk.dat new file mode 100644 index 0000000..ba71b13 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kk_KZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kk_KZ.dat new file mode 100644 index 0000000..ef71e9e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kk_KZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kkj.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kkj.dat new file mode 100644 index 0000000..b7252d7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kkj.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kkj_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kkj_CM.dat new file mode 100644 index 0000000..b5706c6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kkj_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kl.dat new file mode 100644 index 0000000..a1c45cc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kl_GL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kl_GL.dat new file mode 100644 index 0000000..9ce15b1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kl_GL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kln.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kln.dat new file mode 100644 index 0000000..9dc3811 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kln.dat 
differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kln_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kln_KE.dat new file mode 100644 index 0000000..9cf346f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kln_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/km.dat b/venv/lib/python3.7/site-packages/babel/locale-data/km.dat new file mode 100644 index 0000000..58f167f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/km.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/km_KH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/km_KH.dat new file mode 100644 index 0000000..f63fbb3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/km_KH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kn.dat new file mode 100644 index 0000000..d5a0f23 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kn_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kn_IN.dat new file mode 100644 index 0000000..765bcdf Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kn_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ko.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ko.dat new file mode 100644 index 0000000..a9b6115 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ko.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ko_KP.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ko_KP.dat new file mode 100644 index 0000000..eca1743 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ko_KP.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ko_KR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ko_KR.dat new file mode 100644 index 0000000..2ad65d1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ko_KR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kok.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kok.dat new file mode 100644 index 0000000..8d6c7f1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kok.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kok_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kok_IN.dat new file mode 100644 index 0000000..89c2a11 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kok_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ks.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ks.dat new file mode 100644 index 0000000..5ec89a9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ks.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ks_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ks_IN.dat new file mode 100644 index 0000000..333fb48 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ks_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksb.dat new file mode 100644 index 0000000..39ca1f7 Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/ksb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksb_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksb_TZ.dat new file mode 100644 index 0000000..30c4c72 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ksb_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksf.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksf.dat new file mode 100644 index 0000000..b9a865d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ksf.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksf_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksf_CM.dat new file mode 100644 index 0000000..29e42a2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ksf_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksh.dat new file mode 100644 index 0000000..947bd07 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ksh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ksh_DE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ksh_DE.dat new file mode 100644 index 0000000..46e7985 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ksh_DE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ku.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ku.dat new file mode 100644 index 0000000..2362a69 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ku.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ku_TR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ku_TR.dat new file mode 100644 index 0000000..7011391 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ku_TR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kw.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kw.dat new file mode 100644 index 0000000..31066f5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kw.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/kw_GB.dat b/venv/lib/python3.7/site-packages/babel/locale-data/kw_GB.dat new file mode 100644 index 0000000..1a9261c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/kw_GB.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ky.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ky.dat new file mode 100644 index 0000000..2d32c0c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ky.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ky_KG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ky_KG.dat new file mode 100644 index 0000000..e8f789d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ky_KG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lag.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lag.dat new file mode 100644 index 0000000..627cefe Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lag.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lag_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lag_TZ.dat new file mode 100644 index 0000000..3d64c8f 
Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lag_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lb.dat new file mode 100644 index 0000000..c2e1036 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lb_LU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lb_LU.dat new file mode 100644 index 0000000..c1f3fd6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lb_LU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lg.dat new file mode 100644 index 0000000..24aedfb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lg_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lg_UG.dat new file mode 100644 index 0000000..155b632 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lg_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lkt.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lkt.dat new file mode 100644 index 0000000..6fb496c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lkt.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lkt_US.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lkt_US.dat new file mode 100644 index 0000000..e131aa4 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lkt_US.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ln.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ln.dat new file mode 100644 index 0000000..9cfac75 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ln.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ln_AO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ln_AO.dat new file mode 100644 index 0000000..7e50779 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ln_AO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ln_CD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CD.dat new file mode 100644 index 0000000..21691e1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ln_CF.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CF.dat new file mode 100644 index 0000000..2fcb15b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CF.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ln_CG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CG.dat new file mode 100644 index 0000000..88e8a2e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ln_CG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lo.dat new file mode 100644 index 0000000..a8ebb47 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lo_LA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lo_LA.dat new file mode 100644 
index 0000000..af0becd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lo_LA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lrc.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lrc.dat new file mode 100644 index 0000000..265420c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lrc.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IQ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IQ.dat new file mode 100644 index 0000000..55f1a8b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IQ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IR.dat new file mode 100644 index 0000000..3318856 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lrc_IR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lt.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lt.dat new file mode 100644 index 0000000..8454f3b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lt.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lt_LT.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lt_LT.dat new file mode 100644 index 0000000..3040fe9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lt_LT.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lu.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lu.dat new file mode 100644 index 0000000..621da12 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lu.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lu_CD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lu_CD.dat new file mode 100644 index 0000000..542c60b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lu_CD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/luo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/luo.dat new file mode 100644 index 0000000..1ca9e4b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/luo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/luo_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/luo_KE.dat new file mode 100644 index 0000000..4f438dd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/luo_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/luy.dat b/venv/lib/python3.7/site-packages/babel/locale-data/luy.dat new file mode 100644 index 0000000..105d642 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/luy.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/luy_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/luy_KE.dat new file mode 100644 index 0000000..12e8896 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/luy_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lv.dat b/venv/lib/python3.7/site-packages/babel/locale-data/lv.dat new file mode 100644 index 0000000..c0155aa Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lv.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/lv_LV.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/lv_LV.dat new file mode 100644 index 0000000..83e04ec Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/lv_LV.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mas.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mas.dat new file mode 100644 index 0000000..ae30ae7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mas.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mas_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mas_KE.dat new file mode 100644 index 0000000..e7e9c93 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mas_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mas_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mas_TZ.dat new file mode 100644 index 0000000..ddd16e8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mas_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mer.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mer.dat new file mode 100644 index 0000000..44ddeed Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mer.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mer_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mer_KE.dat new file mode 100644 index 0000000..1d4cec8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mer_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mfe.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mfe.dat new file mode 100644 index 0000000..c3fc17f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mfe.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mfe_MU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mfe_MU.dat new file mode 100644 index 0000000..d87e3ea Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mfe_MU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mg.dat new file mode 100644 index 0000000..7263726 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mg_MG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mg_MG.dat new file mode 100644 index 0000000..3b38771 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mg_MG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mgh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mgh.dat new file mode 100644 index 0000000..56fb62f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mgh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mgh_MZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mgh_MZ.dat new file mode 100644 index 0000000..525e6d1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mgh_MZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mgo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mgo.dat new file mode 100644 index 0000000..d206caf Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mgo.dat differ diff --git 
a/venv/lib/python3.7/site-packages/babel/locale-data/mgo_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mgo_CM.dat new file mode 100644 index 0000000..f9269a3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mgo_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mi.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mi.dat new file mode 100644 index 0000000..62204c8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mi.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mi_NZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mi_NZ.dat new file mode 100644 index 0000000..33ba8d1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mi_NZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mk.dat new file mode 100644 index 0000000..3d5e33d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mk_MK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mk_MK.dat new file mode 100644 index 0000000..974a3b7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mk_MK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ml.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ml.dat new file mode 100644 index 0000000..8f4cfc9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ml.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ml_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ml_IN.dat new file mode 100644 index 0000000..925c10d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ml_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mn.dat new file mode 100644 index 0000000..9027f83 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mn_MN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mn_MN.dat new file mode 100644 index 0000000..c34265b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mn_MN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mr.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mr.dat new file mode 100644 index 0000000..efa1ded Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mr.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mr_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mr_IN.dat new file mode 100644 index 0000000..cd6a8cf Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mr_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ms.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ms.dat new file mode 100644 index 0000000..9f2f02d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ms.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ms_BN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ms_BN.dat new file mode 100644 index 0000000..72f48bd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ms_BN.dat differ 
diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ms_MY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ms_MY.dat new file mode 100644 index 0000000..a95be5b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ms_MY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ms_SG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ms_SG.dat new file mode 100644 index 0000000..9517ef3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ms_SG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mt.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mt.dat new file mode 100644 index 0000000..8c7fe45 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mt.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mt_MT.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mt_MT.dat new file mode 100644 index 0000000..d17b1e4 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mt_MT.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mua.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mua.dat new file mode 100644 index 0000000..12bf4e1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mua.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mua_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mua_CM.dat new file mode 100644 index 0000000..6ab7981 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mua_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/my.dat b/venv/lib/python3.7/site-packages/babel/locale-data/my.dat new file mode 100644 index 0000000..bda667c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/my.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/my_MM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/my_MM.dat new file mode 100644 index 0000000..90e8b26 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/my_MM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mzn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mzn.dat new file mode 100644 index 0000000..7184c6c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mzn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/mzn_IR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/mzn_IR.dat new file mode 100644 index 0000000..05b93cb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/mzn_IR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/naq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/naq.dat new file mode 100644 index 0000000..8392db3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/naq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/naq_NA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/naq_NA.dat new file mode 100644 index 0000000..fbc77b9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/naq_NA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nb.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nb.dat new file mode 100644 index 0000000..7c1f69e Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/nb.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nb_NO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nb_NO.dat new file mode 100644 index 0000000..30ed114 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nb_NO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nb_SJ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nb_SJ.dat new file mode 100644 index 0000000..69fda37 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nb_SJ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nd.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nd.dat new file mode 100644 index 0000000..50a0c31 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nd.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nd_ZW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nd_ZW.dat new file mode 100644 index 0000000..30156c5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nd_ZW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nds.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nds.dat new file mode 100644 index 0000000..603c807 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nds.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nds_DE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nds_DE.dat new file mode 100644 index 0000000..37ae225 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nds_DE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nds_NL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nds_NL.dat new file mode 100644 index 0000000..d0d48d7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nds_NL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ne.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ne.dat new file mode 100644 index 0000000..59c014e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ne.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ne_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ne_IN.dat new file mode 100644 index 0000000..ff01846 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ne_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ne_NP.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ne_NP.dat new file mode 100644 index 0000000..27abb65 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ne_NP.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl.dat new file mode 100644 index 0000000..fddf6f3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_AW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_AW.dat new file mode 100644 index 0000000..6c9635e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_AW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_BE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_BE.dat new file mode 100644 index 
0000000..a102b92 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_BE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_BQ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_BQ.dat new file mode 100644 index 0000000..0385169 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_BQ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_CW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_CW.dat new file mode 100644 index 0000000..4db3854 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_CW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_NL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_NL.dat new file mode 100644 index 0000000..9772695 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_NL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_SR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_SR.dat new file mode 100644 index 0000000..8b3a239 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_SR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nl_SX.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nl_SX.dat new file mode 100644 index 0000000..3269133 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nl_SX.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nmg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nmg.dat new file mode 100644 index 0000000..60c7bb3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nmg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nmg_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nmg_CM.dat new file mode 100644 index 0000000..39e7af6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nmg_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nn.dat new file mode 100644 index 0000000..ce89472 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nn_NO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nn_NO.dat new file mode 100644 index 0000000..4730a30 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nn_NO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nnh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nnh.dat new file mode 100644 index 0000000..2b437d2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nnh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nnh_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nnh_CM.dat new file mode 100644 index 0000000..80b5c73 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nnh_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nus.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nus.dat new file mode 100644 index 0000000..280ec1a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nus.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nus_SS.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/nus_SS.dat new file mode 100644 index 0000000..a0845c5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nus_SS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nyn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nyn.dat new file mode 100644 index 0000000..ef5bda0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nyn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/nyn_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/nyn_UG.dat new file mode 100644 index 0000000..4ada4dc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/nyn_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/om.dat b/venv/lib/python3.7/site-packages/babel/locale-data/om.dat new file mode 100644 index 0000000..b275cff Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/om.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/om_ET.dat b/venv/lib/python3.7/site-packages/babel/locale-data/om_ET.dat new file mode 100644 index 0000000..7e0a1b2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/om_ET.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/om_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/om_KE.dat new file mode 100644 index 0000000..3cd023d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/om_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/or.dat b/venv/lib/python3.7/site-packages/babel/locale-data/or.dat new file mode 100644 index 0000000..f361df2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/or.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/or_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/or_IN.dat new file mode 100644 index 0000000..041e5ca Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/or_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/os.dat b/venv/lib/python3.7/site-packages/babel/locale-data/os.dat new file mode 100644 index 0000000..e1485e1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/os.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/os_GE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/os_GE.dat new file mode 100644 index 0000000..d806181 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/os_GE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/os_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/os_RU.dat new file mode 100644 index 0000000..c70bee3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/os_RU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pa.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pa.dat new file mode 100644 index 0000000..97f13f9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pa.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab.dat new file mode 100644 index 0000000..b12d4ba Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab.dat differ diff --git 
a/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab_PK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab_PK.dat new file mode 100644 index 0000000..81dd396 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Arab_PK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru.dat new file mode 100644 index 0000000..e431375 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru_IN.dat new file mode 100644 index 0000000..a780474 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pa_Guru_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pl.dat new file mode 100644 index 0000000..9c7109d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pl_PL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pl_PL.dat new file mode 100644 index 0000000..a2f42c7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pl_PL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/prg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/prg.dat new file mode 100644 index 0000000..44d3ebd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/prg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/prg_001.dat b/venv/lib/python3.7/site-packages/babel/locale-data/prg_001.dat new file mode 100644 index 0000000..3fdc635 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/prg_001.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ps.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ps.dat new file mode 100644 index 0000000..1ea7469 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ps.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ps_AF.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ps_AF.dat new file mode 100644 index 0000000..192bcab Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ps_AF.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ps_PK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ps_PK.dat new file mode 100644 index 0000000..2ffc83f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ps_PK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt.dat new file mode 100644 index 0000000..329295d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_AO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_AO.dat new file mode 100644 index 0000000..3b22b4a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_AO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_BR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_BR.dat new file mode 100644 index 0000000..55a1f4f Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/pt_BR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_CH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_CH.dat new file mode 100644 index 0000000..abd0a97 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_CH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_CV.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_CV.dat new file mode 100644 index 0000000..b245776 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_CV.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_GQ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_GQ.dat new file mode 100644 index 0000000..c98db41 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_GQ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_GW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_GW.dat new file mode 100644 index 0000000..96d9a46 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_GW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_LU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_LU.dat new file mode 100644 index 0000000..14ef983 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_LU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_MO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_MO.dat new file mode 100644 index 0000000..955dae0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_MO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_MZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_MZ.dat new file mode 100644 index 0000000..82125fa Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_MZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_PT.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_PT.dat new file mode 100644 index 0000000..0cc9fb2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_PT.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_ST.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_ST.dat new file mode 100644 index 0000000..03340e1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_ST.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/pt_TL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/pt_TL.dat new file mode 100644 index 0000000..f6a45d2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/pt_TL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/qu.dat b/venv/lib/python3.7/site-packages/babel/locale-data/qu.dat new file mode 100644 index 0000000..96acf0b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/qu.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/qu_BO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/qu_BO.dat new file mode 100644 index 0000000..396b216 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/qu_BO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/qu_EC.dat b/venv/lib/python3.7/site-packages/babel/locale-data/qu_EC.dat new file mode 100644 
index 0000000..77eca95 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/qu_EC.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/qu_PE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/qu_PE.dat new file mode 100644 index 0000000..76fd711 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/qu_PE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rm.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rm.dat new file mode 100644 index 0000000..d8060af Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rm.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rm_CH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rm_CH.dat new file mode 100644 index 0000000..c46c2c4 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rm_CH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rn.dat new file mode 100644 index 0000000..3ed9f85 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rn_BI.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rn_BI.dat new file mode 100644 index 0000000..1d649da Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rn_BI.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ro.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ro.dat new file mode 100644 index 0000000..68dff87 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ro.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ro_MD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ro_MD.dat new file mode 100644 index 0000000..e745f00 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ro_MD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ro_RO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ro_RO.dat new file mode 100644 index 0000000..44dd2b2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ro_RO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rof.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rof.dat new file mode 100644 index 0000000..1f515a3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rof.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rof_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rof_TZ.dat new file mode 100644 index 0000000..13bc7f4 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rof_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/root.dat b/venv/lib/python3.7/site-packages/babel/locale-data/root.dat new file mode 100644 index 0000000..5263281 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/root.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru.dat new file mode 100644 index 0000000..fee711c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_BY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_BY.dat new 
file mode 100644 index 0000000..36c6314 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_BY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_KG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_KG.dat new file mode 100644 index 0000000..50204a8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_KG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_KZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_KZ.dat new file mode 100644 index 0000000..ae30bc0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_KZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_MD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_MD.dat new file mode 100644 index 0000000..62ee52d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_MD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_RU.dat new file mode 100644 index 0000000..89fdbf7 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_RU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ru_UA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ru_UA.dat new file mode 100644 index 0000000..9293d91 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ru_UA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rw.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rw.dat new file mode 100644 index 0000000..2448e16 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rw.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rw_RW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rw_RW.dat new file mode 100644 index 0000000..5d715f2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rw_RW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rwk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rwk.dat new file mode 100644 index 0000000..b710240 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rwk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/rwk_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/rwk_TZ.dat new file mode 100644 index 0000000..dd017ce Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/rwk_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sah.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sah.dat new file mode 100644 index 0000000..3a33e76 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sah.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sah_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sah_RU.dat new file mode 100644 index 0000000..99b9342 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sah_RU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/saq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/saq.dat new file mode 100644 index 0000000..7a42541 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/saq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/saq_KE.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/saq_KE.dat new file mode 100644 index 0000000..9a255bd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/saq_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sbp.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sbp.dat new file mode 100644 index 0000000..85ed51c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sbp.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sbp_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sbp_TZ.dat new file mode 100644 index 0000000..117fcb6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sbp_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sd.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sd.dat new file mode 100644 index 0000000..f2d90f9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sd.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sd_PK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sd_PK.dat new file mode 100644 index 0000000..1edc415 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sd_PK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/se.dat b/venv/lib/python3.7/site-packages/babel/locale-data/se.dat new file mode 100644 index 0000000..a79afe6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/se.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/se_FI.dat b/venv/lib/python3.7/site-packages/babel/locale-data/se_FI.dat new file mode 100644 index 0000000..7b2969c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/se_FI.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/se_NO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/se_NO.dat new file mode 100644 index 0000000..2de0d30 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/se_NO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/se_SE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/se_SE.dat new file mode 100644 index 0000000..ab91dbd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/se_SE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/seh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/seh.dat new file mode 100644 index 0000000..0083bc3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/seh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/seh_MZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/seh_MZ.dat new file mode 100644 index 0000000..53442ff Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/seh_MZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ses.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ses.dat new file mode 100644 index 0000000..645e848 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ses.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ses_ML.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ses_ML.dat new file mode 100644 index 0000000..ff4e931 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ses_ML.dat differ diff --git 
a/venv/lib/python3.7/site-packages/babel/locale-data/sg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sg.dat new file mode 100644 index 0000000..1277aca Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sg_CF.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sg_CF.dat new file mode 100644 index 0000000..ade3002 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sg_CF.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/shi.dat b/venv/lib/python3.7/site-packages/babel/locale-data/shi.dat new file mode 100644 index 0000000..b5694cc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/shi.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn.dat new file mode 100644 index 0000000..f65cf80 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn_MA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn_MA.dat new file mode 100644 index 0000000..1073962 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Latn_MA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng.dat b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng.dat new file mode 100644 index 0000000..4c0a643 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng_MA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng_MA.dat new file mode 100644 index 0000000..1073962 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/shi_Tfng_MA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/si.dat b/venv/lib/python3.7/site-packages/babel/locale-data/si.dat new file mode 100644 index 0000000..deaafdd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/si.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/si_LK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/si_LK.dat new file mode 100644 index 0000000..4a18b92 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/si_LK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sk.dat new file mode 100644 index 0000000..c437fab Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sk_SK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sk_SK.dat new file mode 100644 index 0000000..ec93bbc Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sk_SK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sl.dat new file mode 100644 index 0000000..22fff79 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sl_SI.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sl_SI.dat new file mode 100644 index 0000000..6986634 Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/sl_SI.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/smn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/smn.dat new file mode 100644 index 0000000..28a8336 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/smn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/smn_FI.dat b/venv/lib/python3.7/site-packages/babel/locale-data/smn_FI.dat new file mode 100644 index 0000000..e862633 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/smn_FI.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sn.dat new file mode 100644 index 0000000..69d33a1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sn_ZW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sn_ZW.dat new file mode 100644 index 0000000..4b32b69 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sn_ZW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/so.dat b/venv/lib/python3.7/site-packages/babel/locale-data/so.dat new file mode 100644 index 0000000..010107f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/so.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/so_DJ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/so_DJ.dat new file mode 100644 index 0000000..f4700b6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/so_DJ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/so_ET.dat b/venv/lib/python3.7/site-packages/babel/locale-data/so_ET.dat new file mode 100644 index 0000000..acef668 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/so_ET.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/so_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/so_KE.dat new file mode 100644 index 0000000..9122f91 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/so_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/so_SO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/so_SO.dat new file mode 100644 index 0000000..4191a6a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/so_SO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sq.dat new file mode 100644 index 0000000..f32f612 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sq_AL.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sq_AL.dat new file mode 100644 index 0000000..c0680b3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sq_AL.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sq_MK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sq_MK.dat new file mode 100644 index 0000000..3ae7f80 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sq_MK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sq_XK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sq_XK.dat new file mode 100644 index 
0000000..236227c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sq_XK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr.dat new file mode 100644 index 0000000..62cabfb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl.dat new file mode 100644 index 0000000..b810cd9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_BA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_BA.dat new file mode 100644 index 0000000..f51e54e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_BA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_ME.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_ME.dat new file mode 100644 index 0000000..aee3988 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_ME.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_RS.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_RS.dat new file mode 100644 index 0000000..53395f2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_RS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_XK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_XK.dat new file mode 100644 index 0000000..3448206 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Cyrl_XK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn.dat new file mode 100644 index 0000000..bd64672 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_BA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_BA.dat new file mode 100644 index 0000000..606a2fa Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_BA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_ME.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_ME.dat new file mode 100644 index 0000000..00c6540 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_ME.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_RS.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_RS.dat new file mode 100644 index 0000000..66a9852 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_RS.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_XK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_XK.dat new file mode 100644 index 0000000..66e6371 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sr_Latn_XK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sv.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sv.dat new file mode 100644 index 0000000..6a04b1f Binary files /dev/null and 
b/venv/lib/python3.7/site-packages/babel/locale-data/sv.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sv_AX.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sv_AX.dat new file mode 100644 index 0000000..3ea06d1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sv_AX.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sv_FI.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sv_FI.dat new file mode 100644 index 0000000..1dd845f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sv_FI.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sv_SE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sv_SE.dat new file mode 100644 index 0000000..d9bd8b8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sv_SE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sw.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sw.dat new file mode 100644 index 0000000..104e054 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sw.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sw_CD.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sw_CD.dat new file mode 100644 index 0000000..81e4ac1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sw_CD.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sw_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sw_KE.dat new file mode 100644 index 0000000..188aa0e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sw_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sw_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sw_TZ.dat new file mode 100644 index 0000000..738d2f9 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sw_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/sw_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/sw_UG.dat new file mode 100644 index 0000000..e910e3f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/sw_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ta.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ta.dat new file mode 100644 index 0000000..62b5df0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ta.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ta_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ta_IN.dat new file mode 100644 index 0000000..b49727e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ta_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ta_LK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ta_LK.dat new file mode 100644 index 0000000..8311aad Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ta_LK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ta_MY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ta_MY.dat new file mode 100644 index 0000000..94526ca Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ta_MY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ta_SG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ta_SG.dat new file mode 100644 index 
0000000..c960f96 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ta_SG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/te.dat b/venv/lib/python3.7/site-packages/babel/locale-data/te.dat new file mode 100644 index 0000000..0dfd39c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/te.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/te_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/te_IN.dat new file mode 100644 index 0000000..4277eda Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/te_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/teo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/teo.dat new file mode 100644 index 0000000..0a78a12 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/teo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/teo_KE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/teo_KE.dat new file mode 100644 index 0000000..e2cae34 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/teo_KE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/teo_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/teo_UG.dat new file mode 100644 index 0000000..4aca272 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/teo_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tg.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tg.dat new file mode 100644 index 0000000..c183809 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tg.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tg_TJ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tg_TJ.dat new file mode 100644 index 0000000..723c733 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tg_TJ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/th.dat b/venv/lib/python3.7/site-packages/babel/locale-data/th.dat new file mode 100644 index 0000000..6622d40 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/th.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/th_TH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/th_TH.dat new file mode 100644 index 0000000..e67481e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/th_TH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ti.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ti.dat new file mode 100644 index 0000000..59b4979 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ti.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ti_ER.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ti_ER.dat new file mode 100644 index 0000000..b35a22b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ti_ER.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ti_ET.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ti_ET.dat new file mode 100644 index 0000000..ee1ced6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ti_ET.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tk.dat new file 
mode 100644 index 0000000..a7a0487 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tk_TM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tk_TM.dat new file mode 100644 index 0000000..33f97d1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tk_TM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/to.dat b/venv/lib/python3.7/site-packages/babel/locale-data/to.dat new file mode 100644 index 0000000..9cf0b4f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/to.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/to_TO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/to_TO.dat new file mode 100644 index 0000000..8910e7c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/to_TO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tr.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tr.dat new file mode 100644 index 0000000..71d193f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tr.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tr_CY.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tr_CY.dat new file mode 100644 index 0000000..7c40942 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tr_CY.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tr_TR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tr_TR.dat new file mode 100644 index 0000000..569d59c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tr_TR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tt.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tt.dat new file mode 100644 index 0000000..db348ac Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tt.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tt_RU.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tt_RU.dat new file mode 100644 index 0000000..bd626ad Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tt_RU.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/twq.dat b/venv/lib/python3.7/site-packages/babel/locale-data/twq.dat new file mode 100644 index 0000000..e403edd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/twq.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/twq_NE.dat b/venv/lib/python3.7/site-packages/babel/locale-data/twq_NE.dat new file mode 100644 index 0000000..1f5c4f6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/twq_NE.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tzm.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tzm.dat new file mode 100644 index 0000000..0d1538a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tzm.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/tzm_MA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/tzm_MA.dat new file mode 100644 index 0000000..55b3d11 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/tzm_MA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ug.dat 
b/venv/lib/python3.7/site-packages/babel/locale-data/ug.dat new file mode 100644 index 0000000..135279d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ug.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ug_CN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ug_CN.dat new file mode 100644 index 0000000..69a52a6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ug_CN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uk.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uk.dat new file mode 100644 index 0000000..2ed70f6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uk.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uk_UA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uk_UA.dat new file mode 100644 index 0000000..9d19348 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uk_UA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ur.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ur.dat new file mode 100644 index 0000000..6c1bee3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ur.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ur_IN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ur_IN.dat new file mode 100644 index 0000000..db7a827 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ur_IN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/ur_PK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/ur_PK.dat new file mode 100644 index 0000000..d0a15e0 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/ur_PK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz.dat new file mode 100644 index 0000000..65cc8fb Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab.dat new file mode 100644 index 0000000..da46271 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab_AF.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab_AF.dat new file mode 100644 index 0000000..d87c70a Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Arab_AF.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl.dat new file mode 100644 index 0000000..ff9341e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl_UZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl_UZ.dat new file mode 100644 index 0000000..ec69b3b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Cyrl_UZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn.dat new file mode 100644 index 0000000..25c6f44 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn.dat differ 
diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn_UZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn_UZ.dat new file mode 100644 index 0000000..9a501d5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/uz_Latn_UZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vai.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vai.dat new file mode 100644 index 0000000..e0742aa Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vai.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn.dat new file mode 100644 index 0000000..0c066f2 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn_LR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn_LR.dat new file mode 100644 index 0000000..711221d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Latn_LR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii.dat new file mode 100644 index 0000000..2bcd9ff Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii_LR.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii_LR.dat new file mode 100644 index 0000000..711221d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vai_Vaii_LR.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vi.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vi.dat new file mode 100644 index 0000000..9d17320 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vi.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vi_VN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vi_VN.dat new file mode 100644 index 0000000..bbb27ca Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vi_VN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vo.dat new file mode 100644 index 0000000..b49b93f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vo_001.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vo_001.dat new file mode 100644 index 0000000..d4d189d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vo_001.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vun.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vun.dat new file mode 100644 index 0000000..68621b3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vun.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/vun_TZ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/vun_TZ.dat new file mode 100644 index 0000000..dbdd1f1 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/vun_TZ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/wae.dat b/venv/lib/python3.7/site-packages/babel/locale-data/wae.dat new file mode 100644 index 
0000000..0b00667 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/wae.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/wae_CH.dat b/venv/lib/python3.7/site-packages/babel/locale-data/wae_CH.dat new file mode 100644 index 0000000..dd61695 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/wae_CH.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/wo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/wo.dat new file mode 100644 index 0000000..0e76352 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/wo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/wo_SN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/wo_SN.dat new file mode 100644 index 0000000..790f0d6 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/wo_SN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/xh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/xh.dat new file mode 100644 index 0000000..8066241 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/xh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/xh_ZA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/xh_ZA.dat new file mode 100644 index 0000000..f2654e8 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/xh_ZA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/xog.dat b/venv/lib/python3.7/site-packages/babel/locale-data/xog.dat new file mode 100644 index 0000000..982a413 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/xog.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/xog_UG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/xog_UG.dat new file mode 100644 index 0000000..d445f5d Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/xog_UG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yav.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yav.dat new file mode 100644 index 0000000..900801c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yav.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yav_CM.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yav_CM.dat new file mode 100644 index 0000000..f651603 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yav_CM.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yi.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yi.dat new file mode 100644 index 0000000..21dfbbe Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yi.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yi_001.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yi_001.dat new file mode 100644 index 0000000..e708489 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yi_001.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yo.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yo.dat new file mode 100644 index 0000000..925ed4c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yo.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yo_BJ.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yo_BJ.dat new 
file mode 100644 index 0000000..b2dfb94 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yo_BJ.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yo_NG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yo_NG.dat new file mode 100644 index 0000000..33d975c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yo_NG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yue.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yue.dat new file mode 100644 index 0000000..0145d26 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yue.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans.dat new file mode 100644 index 0000000..218c19b Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans_CN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans_CN.dat new file mode 100644 index 0000000..d21bf88 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hans_CN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant.dat new file mode 100644 index 0000000..5cbcda5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant_HK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant_HK.dat new file mode 100644 index 0000000..97333e3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/yue_Hant_HK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zgh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zgh.dat new file mode 100644 index 0000000..963ba8f Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zgh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zgh_MA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zgh_MA.dat new file mode 100644 index 0000000..cee3f29 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zgh_MA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh.dat new file mode 100644 index 0000000..cb52d1e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans.dat new file mode 100644 index 0000000..51ccaae Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_CN.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_CN.dat new file mode 100644 index 0000000..e7ea9ee Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_CN.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_HK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_HK.dat new file mode 100644 index 0000000..dcca9b3 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_HK.dat differ diff 
--git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_MO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_MO.dat new file mode 100644 index 0000000..743f987 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_MO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_SG.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_SG.dat new file mode 100644 index 0000000..91d72c5 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hans_SG.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant.dat new file mode 100644 index 0000000..63f3b72 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_HK.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_HK.dat new file mode 100644 index 0000000..2b6ae0c Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_HK.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_MO.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_MO.dat new file mode 100644 index 0000000..ce036cd Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_MO.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_TW.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_TW.dat new file mode 100644 index 0000000..be9cd1e Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zh_Hant_TW.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zu.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zu.dat new file mode 100644 index 0000000..24a8517 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zu.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/locale-data/zu_ZA.dat b/venv/lib/python3.7/site-packages/babel/locale-data/zu_ZA.dat new file mode 100644 index 0000000..70bc467 Binary files /dev/null and b/venv/lib/python3.7/site-packages/babel/locale-data/zu_ZA.dat differ diff --git a/venv/lib/python3.7/site-packages/babel/localedata.py b/venv/lib/python3.7/site-packages/babel/localedata.py new file mode 100644 index 0000000..e012abb --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/localedata.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +""" + babel.localedata + ~~~~~~~~~~~~~~~~ + + Low-level locale data access. + + :note: The `Locale` class, which uses this module under the hood, provides a + more convenient interface for accessing the locale data. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os +import threading +from itertools import chain + +from babel._compat import pickle, string_types, abc + + +_cache = {} +_cache_lock = threading.RLock() +_dirname = os.path.join(os.path.dirname(__file__), 'locale-data') + + +def normalize_locale(name): + """Normalize a locale ID by stripping spaces and apply proper casing. + + Returns the normalized locale ID string or `None` if the ID is not + recognized. 
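+
+    A usage sketch added for illustration (not in upstream Babel's docstring);
+    it assumes the bundled 'en_US' locale data shipped in this package is
+    available next to this module:
+
+    >>> normalize_locale(' en_US ')
+    'en_US'
+    >>> normalize_locale('EN_us')
+    'en_US'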
+ """ + if not name or not isinstance(name, string_types): + return None + name = name.strip().lower() + for locale_id in chain.from_iterable([_cache, locale_identifiers()]): + if name == locale_id.lower(): + return locale_id + + +def exists(name): + """Check whether locale data is available for the given locale. + + Returns `True` if it exists, `False` otherwise. + + :param name: the locale identifier string + """ + if not name or not isinstance(name, string_types): + return False + if name in _cache: + return True + file_found = os.path.exists(os.path.join(_dirname, '%s.dat' % name)) + return True if file_found else bool(normalize_locale(name)) + + +def locale_identifiers(): + """Return a list of all locale identifiers for which locale data is + available. + + This data is cached after the first invocation in `locale_identifiers.cache`. + + Removing the `locale_identifiers.cache` attribute or setting it to `None` + will cause this function to re-read the list from disk. + + .. versionadded:: 0.8.1 + + :return: a list of locale identifiers (strings) + """ + data = getattr(locale_identifiers, 'cache', None) + if data is None: + locale_identifiers.cache = data = [ + stem + for stem, extension in + (os.path.splitext(filename) for filename in os.listdir(_dirname)) + if extension == '.dat' and stem != 'root' + ] + return data + + +def load(name, merge_inherited=True): + """Load the locale data for the given locale. + + The locale data is a dictionary that contains much of the data defined by + the Common Locale Data Repository (CLDR). This data is stored as a + collection of pickle files inside the ``babel`` package. + + >>> d = load('en_US') + >>> d['languages']['sv'] + u'Swedish' + + Note that the results are cached, and subsequent requests for the same + locale return the same dictionary: + + >>> d1 = load('en_US') + >>> d2 = load('en_US') + >>> d1 is d2 + True + + :param name: the locale identifier string (or "root") + :param merge_inherited: whether the inherited data should be merged into + the data of the requested locale + :raise `IOError`: if no locale data file is found for the given locale + identifer, or one of the locales it inherits from + """ + _cache_lock.acquire() + try: + data = _cache.get(name) + if not data: + # Load inherited data + if name == 'root' or not merge_inherited: + data = {} + else: + from babel.core import get_global + parent = get_global('parent_exceptions').get(name) + if not parent: + parts = name.split('_') + if len(parts) == 1: + parent = 'root' + else: + parent = '_'.join(parts[:-1]) + data = load(parent).copy() + filename = os.path.join(_dirname, '%s.dat' % name) + with open(filename, 'rb') as fileobj: + if name != 'root' and merge_inherited: + merge(data, pickle.load(fileobj)) + else: + data = pickle.load(fileobj) + _cache[name] = data + return data + finally: + _cache_lock.release() + + +def merge(dict1, dict2): + """Merge the data from `dict2` into the `dict1` dictionary, making copies + of nested dictionaries. 
+ + >>> d = {1: 'foo', 3: 'baz'} + >>> merge(d, {1: 'Foo', 2: 'Bar'}) + >>> sorted(d.items()) + [(1, 'Foo'), (2, 'Bar'), (3, 'baz')] + + :param dict1: the dictionary to merge into + :param dict2: the dictionary containing the data that should be merged + """ + for key, val2 in dict2.items(): + if val2 is not None: + val1 = dict1.get(key) + if isinstance(val2, dict): + if val1 is None: + val1 = {} + if isinstance(val1, Alias): + val1 = (val1, val2) + elif isinstance(val1, tuple): + alias, others = val1 + others = others.copy() + merge(others, val2) + val1 = (alias, others) + else: + val1 = val1.copy() + merge(val1, val2) + else: + val1 = val2 + dict1[key] = val1 + + +class Alias(object): + """Representation of an alias in the locale data. + + An alias is a value that refers to some other part of the locale data, + as specified by the `keys`. + """ + + def __init__(self, keys): + self.keys = tuple(keys) + + def __repr__(self): + return '<%s %r>' % (type(self).__name__, self.keys) + + def resolve(self, data): + """Resolve the alias based on the given data. + + This is done recursively, so if one alias resolves to a second alias, + that second alias will also be resolved. + + :param data: the locale data + :type data: `dict` + """ + base = data + for key in self.keys: + data = data[key] + if isinstance(data, Alias): + data = data.resolve(base) + elif isinstance(data, tuple): + alias, others = data + data = alias.resolve(base) + return data + + +class LocaleDataDict(abc.MutableMapping): + """Dictionary wrapper that automatically resolves aliases to the actual + values. + """ + + def __init__(self, data, base=None): + self._data = data + if base is None: + base = data + self.base = base + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(self._data) + + def __getitem__(self, key): + orig = val = self._data[key] + if isinstance(val, Alias): # resolve an alias + val = val.resolve(self.base) + if isinstance(val, tuple): # Merge a partial dict with an alias + alias, others = val + val = alias.resolve(self.base).copy() + merge(val, others) + if type(val) is dict: # Return a nested alias-resolving dict + val = LocaleDataDict(val, base=self.base) + if val is not orig: + self._data[key] = val + return val + + def __setitem__(self, key, value): + self._data[key] = value + + def __delitem__(self, key): + del self._data[key] + + def copy(self): + return LocaleDataDict(self._data.copy(), base=self.base) diff --git a/venv/lib/python3.7/site-packages/babel/localtime/__init__.py b/venv/lib/python3.7/site-packages/babel/localtime/__init__.py new file mode 100644 index 0000000..aefd8a3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/localtime/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" + babel.localtime + ~~~~~~~~~~~~~~~ + + Babel specific fork of tzlocal to determine the local timezone + of the system. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. 
+""" + +import sys +import pytz +import time +from datetime import timedelta +from datetime import tzinfo +from threading import RLock + +if sys.platform == 'win32': + from babel.localtime._win32 import _get_localzone +else: + from babel.localtime._unix import _get_localzone + + +_cached_tz = None +_cache_lock = RLock() + +STDOFFSET = timedelta(seconds=-time.timezone) +if time.daylight: + DSTOFFSET = timedelta(seconds=-time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET +ZERO = timedelta(0) + + +class _FallbackLocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = time.mktime(tt) + tt = time.localtime(stamp) + return tt.tm_isdst > 0 + + +def get_localzone(): + """Returns the current underlying local timezone object. + Generally this function does not need to be used, it's a + better idea to use the :data:`LOCALTZ` singleton instead. + """ + return _get_localzone() + + +try: + LOCALTZ = get_localzone() +except pytz.UnknownTimeZoneError: + LOCALTZ = _FallbackLocalTimezone() diff --git a/venv/lib/python3.7/site-packages/babel/localtime/_unix.py b/venv/lib/python3.7/site-packages/babel/localtime/_unix.py new file mode 100644 index 0000000..c219469 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/localtime/_unix.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +from __future__ import with_statement +import os +import re +import sys +import pytz +import subprocess + +_systemconfig_tz = re.compile(r'^Time Zone: (.*)$', re.MULTILINE) + + +def _tz_from_env(tzenv): + if tzenv[0] == ':': + tzenv = tzenv[1:] + + # TZ specifies a file + if os.path.exists(tzenv): + with open(tzenv, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + # TZ specifies a zoneinfo zone. + try: + tz = pytz.timezone(tzenv) + # That worked, so we return this: + return tz + except pytz.UnknownTimeZoneError: + raise pytz.UnknownTimeZoneError( + "tzlocal() does not support non-zoneinfo timezones like %s. \n" + "Please use a timezone in the form of Continent/City") + + +def _get_localzone(_root='/'): + """Tries to find the local timezone configuration. + This method prefers finding the timezone name and passing that to pytz, + over passing in the localtime file, as in the later case the zoneinfo + name is unknown. + The parameter _root makes the function look for files like /etc/localtime + beneath the _root directory. This is primarily used by the tests. + In normal usage you call the function without parameters. + """ + + tzenv = os.environ.get('TZ') + if tzenv: + return _tz_from_env(tzenv) + + # This is actually a pretty reliable way to test for the local time + # zone on operating systems like OS X. On OS X especially this is the + # only one that actually works. + try: + link_dst = os.readlink('/etc/localtime') + except OSError: + pass + else: + pos = link_dst.find('/zoneinfo/') + if pos >= 0: + zone_name = link_dst[pos + 10:] + try: + return pytz.timezone(zone_name) + except pytz.UnknownTimeZoneError: + pass + + # If we are on OS X now we are pretty sure that the rest of the + # code will fail and just fall through until it hits the reading + # of /etc/localtime and using it without name. 
At this point we + # can invoke systemconfig which internally invokes ICU. ICU itself + # does the same thing we do (readlink + compare file contents) but + # since it knows where the zone files are that should be a bit + # better than reimplementing the logic here. + if sys.platform == 'darwin': + c = subprocess.Popen(['systemsetup', '-gettimezone'], + stdout=subprocess.PIPE) + sys_result = c.communicate()[0] + c.wait() + tz_match = _systemconfig_tz.search(sys_result) + if tz_match is not None: + zone_name = tz_match.group(1) + try: + return pytz.timezone(zone_name) + except pytz.UnknownTimeZoneError: + pass + + # Now look for distribution specific configuration files + # that contain the timezone name. + tzpath = os.path.join(_root, 'etc/timezone') + if os.path.exists(tzpath): + with open(tzpath, 'rb') as tzfile: + data = tzfile.read() + + # Issue #3 in tzlocal was that /etc/timezone was a zoneinfo file. + # That's a misconfiguration, but we need to handle it gracefully: + if data[:5] != b'TZif2': + etctz = data.strip().decode() + # Get rid of host definitions and comments: + if ' ' in etctz: + etctz, dummy = etctz.split(' ', 1) + if '#' in etctz: + etctz, dummy = etctz.split('#', 1) + return pytz.timezone(etctz.replace(' ', '_')) + + # CentOS has a ZONE setting in /etc/sysconfig/clock, + # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and + # Gentoo has a TIMEZONE setting in /etc/conf.d/clock + # We look through these files for a timezone: + timezone_re = re.compile(r'\s*(TIME)?ZONE\s*=\s*"(?P.+)"') + + for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'): + tzpath = os.path.join(_root, filename) + if not os.path.exists(tzpath): + continue + with open(tzpath, 'rt') as tzfile: + for line in tzfile: + match = timezone_re.match(line) + if match is not None: + # We found a timezone + etctz = match.group("etctz") + return pytz.timezone(etctz.replace(' ', '_')) + + # No explicit setting existed. Use localtime + for filename in ('etc/localtime', 'usr/local/etc/localtime'): + tzpath = os.path.join(_root, filename) + + if not os.path.exists(tzpath): + continue + + with open(tzpath, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + raise pytz.UnknownTimeZoneError('Can not find any timezone configuration') diff --git a/venv/lib/python3.7/site-packages/babel/localtime/_win32.py b/venv/lib/python3.7/site-packages/babel/localtime/_win32.py new file mode 100644 index 0000000..65cc088 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/localtime/_win32.py @@ -0,0 +1,96 @@ +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg = None + +from babel.core import get_global +import pytz + + +# When building the cldr data on windows this module gets imported. +# Because at that point there is no global.dat yet this call will +# fail. We want to catch it down in that case then and just assume +# the mapping was empty. +try: + tz_names = get_global('windows_zone_mapping') +except RuntimeError: + tz_names = {} + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dict = {} + size = winreg.QueryInfoKey(key)[1] + for i in range(size): + data = winreg.EnumValue(key, i) + dict[data[0]] = data[1] + return dict + + +def get_localzone_name(): + # Windows is special. 
It has unique time zone names (in several + # meanings of the word) available, but unfortunately, they can be + # translated to the language of the operating system, so we need to + # do a backwards lookup, by going through all time zones and see which + # one matches. + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + + TZLOCALKEYNAME = r'SYSTEM\CurrentControlSet\Control\TimeZoneInformation' + localtz = winreg.OpenKey(handle, TZLOCALKEYNAME) + keyvalues = valuestodict(localtz) + localtz.Close() + if 'TimeZoneKeyName' in keyvalues: + # Windows 7 (and Vista?) + + # For some reason this returns a string with loads of NUL bytes at + # least on some systems. I don't know if this is a bug somewhere, I + # just work around it. + tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] + else: + # Windows 2000 or XP + + # This is the localized name: + tzwin = keyvalues['StandardName'] + + # Open the list of timezones to look up the real name: + TZKEYNAME = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones' + tzkey = winreg.OpenKey(handle, TZKEYNAME) + + # Now, match this value to Time Zone information + tzkeyname = None + for i in range(winreg.QueryInfoKey(tzkey)[0]): + subkey = winreg.EnumKey(tzkey, i) + sub = winreg.OpenKey(tzkey, subkey) + data = valuestodict(sub) + sub.Close() + if data.get('Std', None) == tzwin: + tzkeyname = subkey + break + + tzkey.Close() + handle.Close() + + if tzkeyname is None: + raise LookupError('Can not find Windows timezone configuration') + + timezone = tz_names.get(tzkeyname) + if timezone is None: + # Nope, that didn't work. Try adding 'Standard Time', + # it seems to work a lot of times: + timezone = tz_names.get(tzkeyname + ' Standard Time') + + # Return what we have. + if timezone is None: + raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname) + + return timezone + + +def _get_localzone(): + if winreg is None: + raise pytz.UnknownTimeZoneError( + 'Runtime support not available') + return pytz.timezone(get_localzone_name()) diff --git a/venv/lib/python3.7/site-packages/babel/messages/__init__.py b/venv/lib/python3.7/site-packages/babel/messages/__init__.py new file mode 100644 index 0000000..5b69675 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" + babel.messages + ~~~~~~~~~~~~~~ + + Support for ``gettext`` message catalogs. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.messages.catalog import * diff --git a/venv/lib/python3.7/site-packages/babel/messages/catalog.py b/venv/lib/python3.7/site-packages/babel/messages/catalog.py new file mode 100644 index 0000000..2fcb461 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/catalog.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.catalog + ~~~~~~~~~~~~~~~~~~~~~~ + + Data structures for message catalogs. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. 
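(Editorial note, not part of the patch: a tiny sketch of the babel.localtime helpers added in the hunks above; the result is host dependent.)

from babel.localtime import LOCALTZ, get_localzone

print(get_localzone())   # e.g. a pytz timezone such as Europe/Paris, depending on the host
print(LOCALTZ)           # the same zone, resolved once when the module is imported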
+""" + +import re +import time + +from cgi import parse_header +from collections import OrderedDict +from datetime import datetime, time as time_ +from difflib import get_close_matches +from email import message_from_string +from copy import copy + +from babel import __version__ as VERSION +from babel.core import Locale, UnknownLocaleError +from babel.dates import format_datetime +from babel.messages.plurals import get_plural +from babel.util import distinct, LOCALTZ, FixedOffsetTimezone +from babel._compat import string_types, number_types, PY2, cmp, text_type, force_text + +__all__ = ['Message', 'Catalog', 'TranslationError'] + + +PYTHON_FORMAT = re.compile(r''' + \% + (?:\(([\w]*)\))? + ( + [-#0\ +]?(?:\*|[\d]+)? + (?:\.(?:\*|[\d]+))? + [hlL]? + ) + ([diouxXeEfFgGcrs%]) +''', re.VERBOSE) + + +def _parse_datetime_header(value): + match = re.match(r'^(?P.*?)(?P[+-]\d{4})?$', value) + + tt = time.strptime(match.group('datetime'), '%Y-%m-%d %H:%M') + ts = time.mktime(tt) + dt = datetime.fromtimestamp(ts) + + # Separate the offset into a sign component, hours, and # minutes + tzoffset = match.group('tzoffset') + if tzoffset is not None: + plus_minus_s, rest = tzoffset[0], tzoffset[1:] + hours_offset_s, mins_offset_s = rest[:2], rest[2:] + + # Make them all integers + plus_minus = int(plus_minus_s + '1') + hours_offset = int(hours_offset_s) + mins_offset = int(mins_offset_s) + + # Calculate net offset + net_mins_offset = hours_offset * 60 + net_mins_offset += mins_offset + net_mins_offset *= plus_minus + + # Create an offset object + tzoffset = FixedOffsetTimezone(net_mins_offset) + + # Store the offset in a datetime object + dt = dt.replace(tzinfo=tzoffset) + + return dt + + +class Message(object): + """Representation of a single message in a catalog.""" + + def __init__(self, id, string=u'', locations=(), flags=(), auto_comments=(), + user_comments=(), previous_id=(), lineno=None, context=None): + """Create the message object. 
+ + :param id: the message ID, or a ``(singular, plural)`` tuple for + pluralizable messages + :param string: the translated message string, or a + ``(singular, plural)`` tuple for pluralizable messages + :param locations: a sequence of ``(filename, lineno)`` tuples + :param flags: a set or sequence of flags + :param auto_comments: a sequence of automatic comments for the message + :param user_comments: a sequence of user comments for the message + :param previous_id: the previous message ID, or a ``(singular, plural)`` + tuple for pluralizable messages + :param lineno: the line number on which the msgid line was found in the + PO file, if any + :param context: the message context + """ + self.id = id + if not string and self.pluralizable: + string = (u'', u'') + self.string = string + self.locations = list(distinct(locations)) + self.flags = set(flags) + if id and self.python_format: + self.flags.add('python-format') + else: + self.flags.discard('python-format') + self.auto_comments = list(distinct(auto_comments)) + self.user_comments = list(distinct(user_comments)) + if isinstance(previous_id, string_types): + self.previous_id = [previous_id] + else: + self.previous_id = list(previous_id) + self.lineno = lineno + self.context = context + + def __repr__(self): + return '<%s %r (flags: %r)>' % (type(self).__name__, self.id, + list(self.flags)) + + def __cmp__(self, other): + """Compare Messages, taking into account plural ids""" + def values_to_compare(obj): + if isinstance(obj, Message) and obj.pluralizable: + return obj.id[0], obj.context or '' + return obj.id, obj.context or '' + return cmp(values_to_compare(self), values_to_compare(other)) + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __eq__(self, other): + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return self.__cmp__(other) != 0 + + def clone(self): + return Message(*map(copy, (self.id, self.string, self.locations, + self.flags, self.auto_comments, + self.user_comments, self.previous_id, + self.lineno, self.context))) + + def check(self, catalog=None): + """Run various validation checks on the message. Some validations + are only performed if the catalog is provided. This method returns + a sequence of `TranslationError` objects. + + :rtype: ``iterator`` + :param catalog: A catalog instance that is passed to the checkers + :see: `Catalog.check` for a way to perform checks for all messages + in a catalog. + """ + from babel.messages.checkers import checkers + errors = [] + for checker in checkers: + try: + checker(catalog, self) + except TranslationError as e: + errors.append(e) + return errors + + @property + def fuzzy(self): + """Whether the translation is fuzzy. + + >>> Message('foo').fuzzy + False + >>> msg = Message('foo', 'foo', flags=['fuzzy']) + >>> msg.fuzzy + True + >>> msg + + + :type: `bool`""" + return 'fuzzy' in self.flags + + @property + def pluralizable(self): + """Whether the message is plurizable. + + >>> Message('foo').pluralizable + False + >>> Message(('foo', 'bar')).pluralizable + True + + :type: `bool`""" + return isinstance(self.id, (list, tuple)) + + @property + def python_format(self): + """Whether the message contains Python-style parameters. 
+ + >>> Message('foo %(name)s bar').python_format + True + >>> Message(('foo %(name)s', 'foo %(name)s')).python_format + True + + :type: `bool`""" + ids = self.id + if not isinstance(ids, (list, tuple)): + ids = [ids] + return any(PYTHON_FORMAT.search(id) for id in ids) + + +class TranslationError(Exception): + """Exception thrown by translation checkers when invalid message + translations are encountered.""" + + +DEFAULT_HEADER = u"""\ +# Translations template for PROJECT. +# Copyright (C) YEAR ORGANIZATION +# This file is distributed under the same license as the PROJECT project. +# FIRST AUTHOR , YEAR. +#""" + + +if PY2: + def _parse_header(header_string): + # message_from_string only works for str, not for unicode + headers = message_from_string(header_string.encode('utf8')) + decoded_headers = {} + for name, value in headers.items(): + name = name.decode('utf8') + value = value.decode('utf8') + decoded_headers[name] = value + return decoded_headers + +else: + _parse_header = message_from_string + + +class Catalog(object): + """Representation of a message catalog.""" + + def __init__(self, locale=None, domain=None, header_comment=DEFAULT_HEADER, + project=None, version=None, copyright_holder=None, + msgid_bugs_address=None, creation_date=None, + revision_date=None, last_translator=None, language_team=None, + charset=None, fuzzy=True): + """Initialize the catalog object. + + :param locale: the locale identifier or `Locale` object, or `None` + if the catalog is not bound to a locale (which basically + means it's a template) + :param domain: the message domain + :param header_comment: the header comment as string, or `None` for the + default header + :param project: the project's name + :param version: the project's version + :param copyright_holder: the copyright holder of the catalog + :param msgid_bugs_address: the email address or URL to submit bug + reports to + :param creation_date: the date the catalog was created + :param revision_date: the date the catalog was revised + :param last_translator: the name and email of the last translator + :param language_team: the name and email of the language team + :param charset: the encoding to use in the output (defaults to utf-8) + :param fuzzy: the fuzzy bit on the catalog header + """ + self.domain = domain + self.locale = locale + self._header_comment = header_comment + self._messages = OrderedDict() + + self.project = project or 'PROJECT' + self.version = version or 'VERSION' + self.copyright_holder = copyright_holder or 'ORGANIZATION' + self.msgid_bugs_address = msgid_bugs_address or 'EMAIL@ADDRESS' + + self.last_translator = last_translator or 'FULL NAME ' + """Name and email address of the last translator.""" + self.language_team = language_team or 'LANGUAGE ' + """Name and email address of the language team.""" + + self.charset = charset or 'utf-8' + + if creation_date is None: + creation_date = datetime.now(LOCALTZ) + elif isinstance(creation_date, datetime) and not creation_date.tzinfo: + creation_date = creation_date.replace(tzinfo=LOCALTZ) + self.creation_date = creation_date + if revision_date is None: + revision_date = 'YEAR-MO-DA HO:MI+ZONE' + elif isinstance(revision_date, datetime) and not revision_date.tzinfo: + revision_date = revision_date.replace(tzinfo=LOCALTZ) + self.revision_date = revision_date + self.fuzzy = fuzzy + + self.obsolete = OrderedDict() # Dictionary of obsolete messages + self._num_plurals = None + self._plural_expr = None + + def _set_locale(self, locale): + if locale is None: + self._locale_identifier = 
None + self._locale = None + return + + if isinstance(locale, Locale): + self._locale_identifier = text_type(locale) + self._locale = locale + return + + if isinstance(locale, string_types): + self._locale_identifier = text_type(locale) + try: + self._locale = Locale.parse(locale) + except UnknownLocaleError: + self._locale = None + return + + raise TypeError('`locale` must be a Locale, a locale identifier string, or None; got %r' % locale) + + def _get_locale(self): + return self._locale + + def _get_locale_identifier(self): + return self._locale_identifier + + locale = property(_get_locale, _set_locale) + locale_identifier = property(_get_locale_identifier) + + def _get_header_comment(self): + comment = self._header_comment + year = datetime.now(LOCALTZ).strftime('%Y') + if hasattr(self.revision_date, 'strftime'): + year = self.revision_date.strftime('%Y') + comment = comment.replace('PROJECT', self.project) \ + .replace('VERSION', self.version) \ + .replace('YEAR', year) \ + .replace('ORGANIZATION', self.copyright_holder) + locale_name = (self.locale.english_name if self.locale else self.locale_identifier) + if locale_name: + comment = comment.replace('Translations template', '%s translations' % locale_name) + return comment + + def _set_header_comment(self, string): + self._header_comment = string + + header_comment = property(_get_header_comment, _set_header_comment, doc="""\ + The header comment for the catalog. + + >>> catalog = Catalog(project='Foobar', version='1.0', + ... copyright_holder='Foo Company') + >>> print(catalog.header_comment) #doctest: +ELLIPSIS + # Translations template for Foobar. + # Copyright (C) ... Foo Company + # This file is distributed under the same license as the Foobar project. + # FIRST AUTHOR , .... + # + + The header can also be set from a string. Any known upper-case variables + will be replaced when the header is retrieved again: + + >>> catalog = Catalog(project='Foobar', version='1.0', + ... copyright_holder='Foo Company') + >>> catalog.header_comment = '''\\ + ... # The POT for my really cool PROJECT project. + ... # Copyright (C) 1990-2003 ORGANIZATION + ... # This file is distributed under the same license as the PROJECT + ... # project. + ... #''' + >>> print(catalog.header_comment) + # The POT for my really cool Foobar project. + # Copyright (C) 1990-2003 Foo Company + # This file is distributed under the same license as the Foobar + # project. 
+ # + + :type: `unicode` + """) + + def _get_mime_headers(self): + headers = [] + headers.append(('Project-Id-Version', + '%s %s' % (self.project, self.version))) + headers.append(('Report-Msgid-Bugs-To', self.msgid_bugs_address)) + headers.append(('POT-Creation-Date', + format_datetime(self.creation_date, 'yyyy-MM-dd HH:mmZ', + locale='en'))) + if isinstance(self.revision_date, (datetime, time_) + number_types): + headers.append(('PO-Revision-Date', + format_datetime(self.revision_date, + 'yyyy-MM-dd HH:mmZ', locale='en'))) + else: + headers.append(('PO-Revision-Date', self.revision_date)) + headers.append(('Last-Translator', self.last_translator)) + if self.locale_identifier: + headers.append(('Language', str(self.locale_identifier))) + if self.locale_identifier and ('LANGUAGE' in self.language_team): + headers.append(('Language-Team', + self.language_team.replace('LANGUAGE', + str(self.locale_identifier)))) + else: + headers.append(('Language-Team', self.language_team)) + if self.locale is not None: + headers.append(('Plural-Forms', self.plural_forms)) + headers.append(('MIME-Version', '1.0')) + headers.append(('Content-Type', + 'text/plain; charset=%s' % self.charset)) + headers.append(('Content-Transfer-Encoding', '8bit')) + headers.append(('Generated-By', 'Babel %s\n' % VERSION)) + return headers + + def _set_mime_headers(self, headers): + for name, value in headers: + name = force_text(name.lower(), encoding=self.charset) + value = force_text(value, encoding=self.charset) + if name == 'project-id-version': + parts = value.split(' ') + self.project = u' '.join(parts[:-1]) + self.version = parts[-1] + elif name == 'report-msgid-bugs-to': + self.msgid_bugs_address = value + elif name == 'last-translator': + self.last_translator = value + elif name == 'language': + value = value.replace('-', '_') + self._set_locale(value) + elif name == 'language-team': + self.language_team = value + elif name == 'content-type': + mimetype, params = parse_header(value) + if 'charset' in params: + self.charset = params['charset'].lower() + elif name == 'plural-forms': + _, params = parse_header(' ;' + value) + self._num_plurals = int(params.get('nplurals', 2)) + self._plural_expr = params.get('plural', '(n != 1)') + elif name == 'pot-creation-date': + self.creation_date = _parse_datetime_header(value) + elif name == 'po-revision-date': + # Keep the value if it's not the default one + if 'YEAR' not in value: + self.revision_date = _parse_datetime_header(value) + + mime_headers = property(_get_mime_headers, _set_mime_headers, doc="""\ + The MIME headers of the catalog, used for the special ``msgid ""`` entry. + + The behavior of this property changes slightly depending on whether a locale + is set or not, the latter indicating that the catalog is actually a template + for actual translations. + + Here's an example of the output for such a catalog template: + + >>> from babel.dates import UTC + >>> created = datetime(1990, 4, 1, 15, 30, tzinfo=UTC) + >>> catalog = Catalog(project='Foobar', version='1.0', + ... creation_date=created) + >>> for name, value in catalog.mime_headers: + ... print('%s: %s' % (name, value)) + Project-Id-Version: Foobar 1.0 + Report-Msgid-Bugs-To: EMAIL@ADDRESS + POT-Creation-Date: 1990-04-01 15:30+0000 + PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE + Last-Translator: FULL NAME + Language-Team: LANGUAGE + MIME-Version: 1.0 + Content-Type: text/plain; charset=utf-8 + Content-Transfer-Encoding: 8bit + Generated-By: Babel ... 
+ + And here's an example of the output when the locale is set: + + >>> revised = datetime(1990, 8, 3, 12, 0, tzinfo=UTC) + >>> catalog = Catalog(locale='de_DE', project='Foobar', version='1.0', + ... creation_date=created, revision_date=revised, + ... last_translator='John Doe ', + ... language_team='de_DE ') + >>> for name, value in catalog.mime_headers: + ... print('%s: %s' % (name, value)) + Project-Id-Version: Foobar 1.0 + Report-Msgid-Bugs-To: EMAIL@ADDRESS + POT-Creation-Date: 1990-04-01 15:30+0000 + PO-Revision-Date: 1990-08-03 12:00+0000 + Last-Translator: John Doe + Language: de_DE + Language-Team: de_DE + Plural-Forms: nplurals=2; plural=(n != 1) + MIME-Version: 1.0 + Content-Type: text/plain; charset=utf-8 + Content-Transfer-Encoding: 8bit + Generated-By: Babel ... + + :type: `list` + """) + + @property + def num_plurals(self): + """The number of plurals used by the catalog or locale. + + >>> Catalog(locale='en').num_plurals + 2 + >>> Catalog(locale='ga').num_plurals + 5 + + :type: `int`""" + if self._num_plurals is None: + num = 2 + if self.locale: + num = get_plural(self.locale)[0] + self._num_plurals = num + return self._num_plurals + + @property + def plural_expr(self): + """The plural expression used by the catalog or locale. + + >>> Catalog(locale='en').plural_expr + '(n != 1)' + >>> Catalog(locale='ga').plural_expr + '(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=6 ? 2 : n>=7 && n<=10 ? 3 : 4)' + >>> Catalog(locale='ding').plural_expr # unknown locale + '(n != 1)' + + :type: `string_types`""" + if self._plural_expr is None: + expr = '(n != 1)' + if self.locale: + expr = get_plural(self.locale)[1] + self._plural_expr = expr + return self._plural_expr + + @property + def plural_forms(self): + """Return the plural forms declaration for the locale. + + >>> Catalog(locale='en').plural_forms + 'nplurals=2; plural=(n != 1)' + >>> Catalog(locale='pt_BR').plural_forms + 'nplurals=2; plural=(n > 1)' + + :type: `str`""" + return 'nplurals=%s; plural=%s' % (self.num_plurals, self.plural_expr) + + def __contains__(self, id): + """Return whether the catalog has a message with the specified ID.""" + return self._key_for(id) in self._messages + + def __len__(self): + """The number of messages in the catalog. + + This does not include the special ``msgid ""`` entry.""" + return len(self._messages) + + def __iter__(self): + """Iterates through all the entries in the catalog, in the order they + were added, yielding a `Message` object for every entry. + + :rtype: ``iterator``""" + buf = [] + for name, value in self.mime_headers: + buf.append('%s: %s' % (name, value)) + flags = set() + if self.fuzzy: + flags |= {'fuzzy'} + yield Message(u'', '\n'.join(buf), flags=flags) + for key in self._messages: + yield self._messages[key] + + def __repr__(self): + locale = '' + if self.locale: + locale = ' %s' % self.locale + return '<%s %r%s>' % (type(self).__name__, self.domain, locale) + + def __delitem__(self, id): + """Delete the message with the specified ID.""" + self.delete(id) + + def __getitem__(self, id): + """Return the message with the specified ID. + + :param id: the message ID + """ + return self.get(id) + + def __setitem__(self, id, message): + """Add or update the message with the specified ID. + + >>> catalog = Catalog() + >>> catalog[u'foo'] = Message(u'foo') + >>> catalog[u'foo'] + + + If a message with that ID is already in the catalog, it is updated + to include the locations and flags of the new message. 
+ + >>> catalog = Catalog() + >>> catalog[u'foo'] = Message(u'foo', locations=[('main.py', 1)]) + >>> catalog[u'foo'].locations + [('main.py', 1)] + >>> catalog[u'foo'] = Message(u'foo', locations=[('utils.py', 5)]) + >>> catalog[u'foo'].locations + [('main.py', 1), ('utils.py', 5)] + + :param id: the message ID + :param message: the `Message` object + """ + assert isinstance(message, Message), 'expected a Message object' + key = self._key_for(id, message.context) + current = self._messages.get(key) + if current: + if message.pluralizable and not current.pluralizable: + # The new message adds pluralization + current.id = message.id + current.string = message.string + current.locations = list(distinct(current.locations + + message.locations)) + current.auto_comments = list(distinct(current.auto_comments + + message.auto_comments)) + current.user_comments = list(distinct(current.user_comments + + message.user_comments)) + current.flags |= message.flags + message = current + elif id == '': + # special treatment for the header message + self.mime_headers = _parse_header(message.string).items() + self.header_comment = '\n'.join([('# %s' % c).rstrip() for c + in message.user_comments]) + self.fuzzy = message.fuzzy + else: + if isinstance(id, (list, tuple)): + assert isinstance(message.string, (list, tuple)), \ + 'Expected sequence but got %s' % type(message.string) + self._messages[key] = message + + def add(self, id, string=None, locations=(), flags=(), auto_comments=(), + user_comments=(), previous_id=(), lineno=None, context=None): + """Add or update the message with the specified ID. + + >>> catalog = Catalog() + >>> catalog.add(u'foo') + + >>> catalog[u'foo'] + + + This method simply constructs a `Message` object with the given + arguments and invokes `__setitem__` with that object. + + :param id: the message ID, or a ``(singular, plural)`` tuple for + pluralizable messages + :param string: the translated message string, or a + ``(singular, plural)`` tuple for pluralizable messages + :param locations: a sequence of ``(filename, lineno)`` tuples + :param flags: a set or sequence of flags + :param auto_comments: a sequence of automatic comments + :param user_comments: a sequence of user comments + :param previous_id: the previous message ID, or a ``(singular, plural)`` + tuple for pluralizable messages + :param lineno: the line number on which the msgid line was found in the + PO file, if any + :param context: the message context + """ + message = Message(id, string, list(locations), flags, auto_comments, + user_comments, previous_id, lineno=lineno, + context=context) + self[id] = message + return message + + def check(self): + """Run various validation checks on the translations in the catalog. + + For every message which fails validation, this method yield a + ``(message, errors)`` tuple, where ``message`` is the `Message` object + and ``errors`` is a sequence of `TranslationError` objects. + + :rtype: ``iterator`` + """ + for message in self._messages.values(): + errors = message.check(catalog=self) + if errors: + yield message, errors + + def get(self, id, context=None): + """Return the message with the specified ID and context. + + :param id: the message ID + :param context: the message context, or ``None`` for no context + """ + return self._messages.get(self._key_for(id, context)) + + def delete(self, id, context=None): + """Delete the message with the specified ID and context. 
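(Editorial note, not part of the patch: a short sketch tying together the Catalog methods documented above, mirroring their doctests. Assumes an installed Babel with locale data for de_DE.)

from babel.messages.catalog import Catalog

catalog = Catalog(locale='de_DE')
catalog.add(u'foo', u'Foo', locations=[('main.py', 1)])
print(catalog.get(u'foo').string)   # Foo
print(u'foo' in catalog)            # True
catalog.delete(u'foo')
print(len(catalog))                 # 0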
+ + :param id: the message ID + :param context: the message context, or ``None`` for no context + """ + key = self._key_for(id, context) + if key in self._messages: + del self._messages[key] + + def update(self, template, no_fuzzy_matching=False, update_header_comment=False, keep_user_comments=True): + """Update the catalog based on the given template catalog. + + >>> from babel.messages import Catalog + >>> template = Catalog() + >>> template.add('green', locations=[('main.py', 99)]) + + >>> template.add('blue', locations=[('main.py', 100)]) + + >>> template.add(('salad', 'salads'), locations=[('util.py', 42)]) + + >>> catalog = Catalog(locale='de_DE') + >>> catalog.add('blue', u'blau', locations=[('main.py', 98)]) + + >>> catalog.add('head', u'Kopf', locations=[('util.py', 33)]) + + >>> catalog.add(('salad', 'salads'), (u'Salat', u'Salate'), + ... locations=[('util.py', 38)]) + + + >>> catalog.update(template) + >>> len(catalog) + 3 + + >>> msg1 = catalog['green'] + >>> msg1.string + >>> msg1.locations + [('main.py', 99)] + + >>> msg2 = catalog['blue'] + >>> msg2.string + u'blau' + >>> msg2.locations + [('main.py', 100)] + + >>> msg3 = catalog['salad'] + >>> msg3.string + (u'Salat', u'Salate') + >>> msg3.locations + [('util.py', 42)] + + Messages that are in the catalog but not in the template are removed + from the main collection, but can still be accessed via the `obsolete` + member: + + >>> 'head' in catalog + False + >>> list(catalog.obsolete.values()) + [] + + :param template: the reference catalog, usually read from a POT file + :param no_fuzzy_matching: whether to use fuzzy matching of message IDs + """ + messages = self._messages + remaining = messages.copy() + self._messages = OrderedDict() + + # Prepare for fuzzy matching + fuzzy_candidates = [] + if not no_fuzzy_matching: + fuzzy_candidates = dict([ + (self._key_for(msgid), messages[msgid].context) + for msgid in messages if msgid and messages[msgid].string + ]) + fuzzy_matches = set() + + def _merge(message, oldkey, newkey): + message = message.clone() + fuzzy = False + if oldkey != newkey: + fuzzy = True + fuzzy_matches.add(oldkey) + oldmsg = messages.get(oldkey) + if isinstance(oldmsg.id, string_types): + message.previous_id = [oldmsg.id] + else: + message.previous_id = list(oldmsg.id) + else: + oldmsg = remaining.pop(oldkey, None) + message.string = oldmsg.string + + if keep_user_comments: + message.user_comments = list(distinct(oldmsg.user_comments)) + + if isinstance(message.id, (list, tuple)): + if not isinstance(message.string, (list, tuple)): + fuzzy = True + message.string = tuple( + [message.string] + ([u''] * (len(message.id) - 1)) + ) + elif len(message.string) != self.num_plurals: + fuzzy = True + message.string = tuple(message.string[:len(oldmsg.string)]) + elif isinstance(message.string, (list, tuple)): + fuzzy = True + message.string = message.string[0] + message.flags |= oldmsg.flags + if fuzzy: + message.flags |= {u'fuzzy'} + self[message.id] = message + + for message in template: + if message.id: + key = self._key_for(message.id, message.context) + if key in messages: + _merge(message, key, key) + else: + if no_fuzzy_matching is False: + # do some fuzzy matching with difflib + if isinstance(key, tuple): + matchkey = key[0] # just the msgid, no context + else: + matchkey = key + matches = get_close_matches(matchkey.lower().strip(), + fuzzy_candidates.keys(), 1) + if matches: + newkey = matches[0] + newctxt = fuzzy_candidates[newkey] + if newctxt is not None: + newkey = newkey, newctxt + _merge(message, 
newkey, key) + continue + + self[message.id] = message + + for msgid in remaining: + if no_fuzzy_matching or msgid not in fuzzy_matches: + self.obsolete[msgid] = remaining[msgid] + + if update_header_comment: + # Allow the updated catalog's header to be rewritten based on the + # template's header + self.header_comment = template.header_comment + + # Make updated catalog's POT-Creation-Date equal to the template + # used to update the catalog + self.creation_date = template.creation_date + + def _key_for(self, id, context=None): + """The key for a message is just the singular ID even for pluralizable + messages, but is a ``(msgid, msgctxt)`` tuple for context-specific + messages. + """ + key = id + if isinstance(key, (list, tuple)): + key = id[0] + if context is not None: + key = (key, context) + return key diff --git a/venv/lib/python3.7/site-packages/babel/messages/checkers.py b/venv/lib/python3.7/site-packages/babel/messages/checkers.py new file mode 100644 index 0000000..8c1effa --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/checkers.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.checkers + ~~~~~~~~~~~~~~~~~~~~~~~ + + Various routines that help with validation of translations. + + :since: version 0.9 + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.messages.catalog import TranslationError, PYTHON_FORMAT +from babel._compat import string_types, izip + + +#: list of format chars that are compatible to each other +_string_format_compatibilities = [ + {'i', 'd', 'u'}, + {'x', 'X'}, + {'f', 'F', 'g', 'G'} +] + + +def num_plurals(catalog, message): + """Verify the number of plurals in the translation.""" + if not message.pluralizable: + if not isinstance(message.string, string_types): + raise TranslationError("Found plural forms for non-pluralizable " + "message") + return + + # skip further tests if no catalog is provided. + elif catalog is None: + return + + msgstrs = message.string + if not isinstance(msgstrs, (list, tuple)): + msgstrs = (msgstrs,) + if len(msgstrs) != catalog.num_plurals: + raise TranslationError("Wrong number of plural forms (expected %d)" % + catalog.num_plurals) + + +def python_format(catalog, message): + """Verify the format string placeholders in the translation.""" + if 'python-format' not in message.flags: + return + msgids = message.id + if not isinstance(msgids, (list, tuple)): + msgids = (msgids,) + msgstrs = message.string + if not isinstance(msgstrs, (list, tuple)): + msgstrs = (msgstrs,) + + for msgid, msgstr in izip(msgids, msgstrs): + if msgstr: + _validate_format(msgid, msgstr) + + +def _validate_format(format, alternative): + """Test format string `alternative` against `format`. `format` can be the + msgid of a message and `alternative` one of the `msgstr`\\s. The two + arguments are not interchangeable as `alternative` may contain less + placeholders if `format` uses named placeholders. + + The behavior of this function is undefined if the string does not use + string formattings. + + If the string formatting of `alternative` is compatible to `format` the + function returns `None`, otherwise a `TranslationError` is raised. + + Examples for compatible format strings: + + >>> _validate_format('Hello %s!', 'Hallo %s!') + >>> _validate_format('Hello %i!', 'Hallo %d!') + + Example for an incompatible format strings: + + >>> _validate_format('Hello %(name)s!', 'Hallo %s!') + Traceback (most recent call last): + ... 
+ TranslationError: the format strings are of different kinds + + This function is used by the `python_format` checker. + + :param format: The original format string + :param alternative: The alternative format string that should be checked + against format + :raises TranslationError: on formatting errors + """ + + def _parse(string): + result = [] + for match in PYTHON_FORMAT.finditer(string): + name, format, typechar = match.groups() + if typechar == '%' and name is None: + continue + result.append((name, str(typechar))) + return result + + def _compatible(a, b): + if a == b: + return True + for set in _string_format_compatibilities: + if a in set and b in set: + return True + return False + + def _check_positional(results): + positional = None + for name, char in results: + if positional is None: + positional = name is None + else: + if (name is None) != positional: + raise TranslationError('format string mixes positional ' + 'and named placeholders') + return bool(positional) + + a, b = map(_parse, (format, alternative)) + + # now check if both strings are positional or named + a_positional, b_positional = map(_check_positional, (a, b)) + if a_positional and not b_positional and not b: + raise TranslationError('placeholders are incompatible') + elif a_positional != b_positional: + raise TranslationError('the format strings are of different kinds') + + # if we are operating on positional strings both must have the + # same number of format chars and those must be compatible + if a_positional: + if len(a) != len(b): + raise TranslationError('positional format placeholders are ' + 'unbalanced') + for idx, ((_, first), (_, second)) in enumerate(izip(a, b)): + if not _compatible(first, second): + raise TranslationError('incompatible format for placeholder ' + '%d: %r and %r are not compatible' % + (idx + 1, first, second)) + + # otherwise the second string must not have names the first one + # doesn't have and the types of those included must be compatible + else: + type_map = dict(a) + for name, typechar in b: + if name not in type_map: + raise TranslationError('unknown named placeholder %r' % name) + elif not _compatible(typechar, type_map[name]): + raise TranslationError('incompatible format for ' + 'placeholder %r: ' + '%r and %r are not compatible' % + (name, typechar, type_map[name])) + + +def _find_checkers(): + checkers = [] + try: + from pkg_resources import working_set + except ImportError: + pass + else: + for entry_point in working_set.iter_entry_points('babel.checkers'): + checkers.append(entry_point.load()) + if len(checkers) == 0: + # if pkg_resources is not available or no usable egg-info was found + # (see #230), just resort to hard-coded checkers + return [num_plurals, python_format] + return checkers + + +checkers = _find_checkers() diff --git a/venv/lib/python3.7/site-packages/babel/messages/extract.py b/venv/lib/python3.7/site-packages/babel/messages/extract.py new file mode 100644 index 0000000..db429b2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/extract.py @@ -0,0 +1,642 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.extract + ~~~~~~~~~~~~~~~~~~~~~~ + + Basic infrastructure for extracting localizable messages from source files. + + This module defines an extensible system for collecting localizable message + strings from a variety of sources. A native extractor for Python source + files is builtin, extractors for other sources can be added using very + simple plugins. 
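(Editorial note, not part of the patch: the checkers above are normally driven through Message.check / Catalog.check; a small sketch of an incompatible placeholder, matching the _validate_format doctest. Assumes an installed Babel with data for the 'de' locale.)

from babel.messages.catalog import Catalog, Message

catalog = Catalog(locale='de')
bad = Message('Hello %(name)s!', 'Hallo %s!')   # the python-format flag is added automatically
for error in bad.check(catalog=catalog):
    print(error)   # the format strings are of different kinds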
+ + The main entry points into the extraction functionality are the functions + `extract_from_dir` and `extract_from_file`. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os +from os.path import relpath +import sys +from tokenize import generate_tokens, COMMENT, NAME, OP, STRING + +from babel.util import parse_encoding, parse_future_flags, pathmatch +from babel._compat import PY2, text_type +from textwrap import dedent + + +GROUP_NAME = 'babel.extractors' + +DEFAULT_KEYWORDS = { + '_': None, + 'gettext': None, + 'ngettext': (1, 2), + 'ugettext': None, + 'ungettext': (1, 2), + 'dgettext': (2,), + 'dngettext': (2, 3), + 'N_': None, + 'pgettext': ((1, 'c'), 2), + 'npgettext': ((1, 'c'), 2, 3) +} + +DEFAULT_MAPPING = [('**.py', 'python')] + +empty_msgid_warning = ( + '%s: warning: Empty msgid. It is reserved by GNU gettext: gettext("") ' + 'returns the header entry with meta information, not the empty string.') + + +def _strip_comment_tags(comments, tags): + """Helper function for `extract` that strips comment tags from strings + in a list of comment lines. This functions operates in-place. + """ + def _strip(line): + for tag in tags: + if line.startswith(tag): + return line[len(tag):].strip() + return line + comments[:] = map(_strip, comments) + + +def extract_from_dir(dirname=None, method_map=DEFAULT_MAPPING, + options_map=None, keywords=DEFAULT_KEYWORDS, + comment_tags=(), callback=None, strip_comment_tags=False): + """Extract messages from any source files found in the given directory. + + This function generates tuples of the form ``(filename, lineno, message, + comments, context)``. + + Which extraction method is used per file is determined by the `method_map` + parameter, which maps extended glob patterns to extraction method names. + For example, the following is the default mapping: + + >>> method_map = [ + ... ('**.py', 'python') + ... ] + + This basically says that files with the filename extension ".py" at any + level inside the directory should be processed by the "python" extraction + method. Files that don't match any of the mapping patterns are ignored. See + the documentation of the `pathmatch` function for details on the pattern + syntax. + + The following extended mapping would also use the "genshi" extraction + method on any file in "templates" subdirectory: + + >>> method_map = [ + ... ('**/templates/**.*', 'genshi'), + ... ('**.py', 'python') + ... ] + + The dictionary provided by the optional `options_map` parameter augments + these mappings. It uses extended glob patterns as keys, and the values are + dictionaries mapping options names to option values (both strings). + + The glob patterns of the `options_map` do not necessarily need to be the + same as those used in the method mapping. For example, while all files in + the ``templates`` folders in an application may be Genshi applications, the + options for those files may differ based on extension: + + >>> options_map = { + ... '**/templates/**.txt': { + ... 'template_class': 'genshi.template:TextTemplate', + ... 'encoding': 'latin-1' + ... }, + ... '**/templates/**.html': { + ... 'include_attrs': '' + ... } + ... } + + :param dirname: the path to the directory to extract messages from. If + not given the current working directory is used. 
+ :param method_map: a list of ``(pattern, method)`` tuples that maps of + extraction method names to extended glob patterns + :param options_map: a dictionary of additional options (optional) + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of tags of translator comments to search for + and include in the results + :param callback: a function that is called for every file that message are + extracted from, just before the extraction itself is + performed; the function is passed the filename, the name + of the extraction method and and the options dictionary as + positional arguments, in that order + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :see: `pathmatch` + """ + if dirname is None: + dirname = os.getcwd() + if options_map is None: + options_map = {} + + absname = os.path.abspath(dirname) + for root, dirnames, filenames in os.walk(absname): + dirnames[:] = [ + subdir for subdir in dirnames + if not (subdir.startswith('.') or subdir.startswith('_')) + ] + dirnames.sort() + filenames.sort() + for filename in filenames: + filepath = os.path.join(root, filename).replace(os.sep, '/') + + for message_tuple in check_and_call_extract_file( + filepath, + method_map, + options_map, + callback, + keywords, + comment_tags, + strip_comment_tags, + dirpath=absname, + ): + yield message_tuple + + +def check_and_call_extract_file(filepath, method_map, options_map, + callback, keywords, comment_tags, + strip_comment_tags, dirpath=None): + """Checks if the given file matches an extraction method mapping, and if so, calls extract_from_file. + + Note that the extraction method mappings are based relative to dirpath. + So, given an absolute path to a file `filepath`, we want to check using + just the relative path from `dirpath` to `filepath`. + + Yields 5-tuples (filename, lineno, messages, comments, context). + + :param filepath: An absolute path to a file that exists. + :param method_map: a list of ``(pattern, method)`` tuples that maps of + extraction method names to extended glob patterns + :param options_map: a dictionary of additional options (optional) + :param callback: a function that is called for every file that message are + extracted from, just before the extraction itself is + performed; the function is passed the filename, the name + of the extraction method and and the options dictionary as + positional arguments, in that order + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of tags of translator comments to search for + and include in the results + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :param dirpath: the path to the directory to extract messages from. 
+ :return: iterable of 5-tuples (filename, lineno, messages, comments, context) + :rtype: Iterable[tuple[str, int, str|tuple[str], list[str], str|None] + """ + # filename is the relative path from dirpath to the actual file + filename = relpath(filepath, dirpath) + + for pattern, method in method_map: + if not pathmatch(pattern, filename): + continue + + options = {} + for opattern, odict in options_map.items(): + if pathmatch(opattern, filename): + options = odict + if callback: + callback(filename, method, options) + for message_tuple in extract_from_file( + method, filepath, + keywords=keywords, + comment_tags=comment_tags, + options=options, + strip_comment_tags=strip_comment_tags + ): + yield (filename, ) + message_tuple + + break + + +def extract_from_file(method, filename, keywords=DEFAULT_KEYWORDS, + comment_tags=(), options=None, strip_comment_tags=False): + """Extract messages from a specific file. + + This function returns a list of tuples of the form ``(lineno, message, comments, context)``. + + :param filename: the path to the file to extract messages from + :param method: a string specifying the extraction method (.e.g. "python") + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of translator tags to search for and include + in the results + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :param options: a dictionary of additional options (optional) + :returns: list of tuples of the form ``(lineno, message, comments, context)`` + :rtype: list[tuple[int, str|tuple[str], list[str], str|None] + """ + with open(filename, 'rb') as fileobj: + return list(extract(method, fileobj, keywords, comment_tags, options, + strip_comment_tags)) + + +def extract(method, fileobj, keywords=DEFAULT_KEYWORDS, comment_tags=(), + options=None, strip_comment_tags=False): + """Extract messages from the given file-like object using the specified + extraction method. + + This function returns tuples of the form ``(lineno, message, comments, context)``. + + The implementation dispatches the actual extraction to plugins, based on the + value of the ``method`` parameter. + + >>> source = b'''# foo module + ... def run(argv): + ... print(_('Hello, world!')) + ... ''' + + >>> from babel._compat import BytesIO + >>> for message in extract('python', BytesIO(source)): + ... print(message) + (3, u'Hello, world!', [], None) + + :param method: an extraction method (a callable), or + a string specifying the extraction method (.e.g. "python"); + if this is a simple name, the extraction function will be + looked up by entry point; if it is an explicit reference + to a function (of the form ``package.module:funcname`` or + ``package.module.funcname``), the corresponding function + will be imported and used + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a dictionary mapping keywords (i.e. 
names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :raise ValueError: if the extraction method is not registered + :returns: iterable of tuples of the form ``(lineno, message, comments, context)`` + :rtype: Iterable[tuple[int, str|tuple[str], list[str], str|None] + """ + func = None + if callable(method): + func = method + elif ':' in method or '.' in method: + if ':' not in method: + lastdot = method.rfind('.') + module, attrname = method[:lastdot], method[lastdot + 1:] + else: + module, attrname = method.split(':', 1) + func = getattr(__import__(module, {}, {}, [attrname]), attrname) + else: + try: + from pkg_resources import working_set + except ImportError: + pass + else: + for entry_point in working_set.iter_entry_points(GROUP_NAME, + method): + func = entry_point.load(require=True) + break + if func is None: + # if pkg_resources is not available or no usable egg-info was found + # (see #230), we resort to looking up the builtin extractors + # directly + builtin = { + 'ignore': extract_nothing, + 'python': extract_python, + 'javascript': extract_javascript + } + func = builtin.get(method) + + if func is None: + raise ValueError('Unknown extraction method %r' % method) + + results = func(fileobj, keywords.keys(), comment_tags, + options=options or {}) + + for lineno, funcname, messages, comments in results: + if funcname: + spec = keywords[funcname] or (1,) + else: + spec = (1,) + if not isinstance(messages, (list, tuple)): + messages = [messages] + if not messages: + continue + + # Validate the messages against the keyword's specification + context = None + msgs = [] + invalid = False + # last_index is 1 based like the keyword spec + last_index = len(messages) + for index in spec: + if isinstance(index, tuple): + context = messages[index[0] - 1] + continue + if last_index < index: + # Not enough arguments + invalid = True + break + message = messages[index - 1] + if message is None: + invalid = True + break + msgs.append(message) + if invalid: + continue + + # keyword spec indexes are 1 based, therefore '-1' + if isinstance(spec[0], tuple): + # context-aware *gettext method + first_msg_index = spec[1] - 1 + else: + first_msg_index = spec[0] - 1 + if not messages[first_msg_index]: + # An empty string msgid isn't valid, emit a warning + where = '%s:%i' % (hasattr(fileobj, 'name') and + fileobj.name or '(unknown)', lineno) + sys.stderr.write((empty_msgid_warning % where) + '\n') + continue + + messages = tuple(msgs) + if len(messages) == 1: + messages = messages[0] + + if strip_comment_tags: + _strip_comment_tags(comments, comment_tags) + yield lineno, messages, comments, context + + +def extract_nothing(fileobj, keywords, comment_tags, options): + """Pseudo extractor that does not actually extract anything, but simply + returns an empty list. + """ + return [] + + +def extract_python(fileobj, keywords, comment_tags, options): + """Extract messages from Python source code. + + It returns an iterator yielding tuples in the following form ``(lineno, + funcname, message, comments)``. 
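(Editorial note, not part of the patch: a minimal end-to-end sketch of the high-level extract() entry point documented above, mirroring its doctest.)

from io import BytesIO
from babel.messages.extract import extract

source = b"""# sample module
def run(argv):
    print(_('Hello, world!'))
"""

for lineno, message, comments, context in extract('python', BytesIO(source)):
    print(lineno, message, comments, context)   # 3 Hello, world! [] None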
+ + :param fileobj: the seekable, file-like object the messages should be + extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :rtype: ``iterator`` + """ + funcname = lineno = message_lineno = None + call_stack = -1 + buf = [] + messages = [] + translator_comments = [] + in_def = in_translator_comments = False + comment_tag = None + + encoding = parse_encoding(fileobj) or options.get('encoding', 'UTF-8') + future_flags = parse_future_flags(fileobj, encoding) + + if PY2: + next_line = fileobj.readline + else: + next_line = lambda: fileobj.readline().decode(encoding) + + tokens = generate_tokens(next_line) + for tok, value, (lineno, _), _, _ in tokens: + if call_stack == -1 and tok == NAME and value in ('def', 'class'): + in_def = True + elif tok == OP and value == '(': + if in_def: + # Avoid false positives for declarations such as: + # def gettext(arg='message'): + in_def = False + continue + if funcname: + message_lineno = lineno + call_stack += 1 + elif in_def and tok == OP and value == ':': + # End of a class definition without parens + in_def = False + continue + elif call_stack == -1 and tok == COMMENT: + # Strip the comment token from the line + if PY2: + value = value.decode(encoding) + value = value[1:].strip() + if in_translator_comments and \ + translator_comments[-1][0] == lineno - 1: + # We're already inside a translator comment, continue appending + translator_comments.append((lineno, value)) + continue + # If execution reaches this point, let's see if comment line + # starts with one of the comment tags + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.append((lineno, value)) + break + elif funcname and call_stack == 0: + nested = (tok == NAME and value in keywords) + if (tok == OP and value == ')') or nested: + if buf: + messages.append(''.join(buf)) + del buf[:] + else: + messages.append(None) + + if len(messages) > 1: + messages = tuple(messages) + else: + messages = messages[0] + # Comments don't apply unless they immediately preceed the + # message + if translator_comments and \ + translator_comments[-1][0] < message_lineno - 1: + translator_comments = [] + + yield (message_lineno, funcname, messages, + [comment[1] for comment in translator_comments]) + + funcname = lineno = message_lineno = None + call_stack = -1 + messages = [] + translator_comments = [] + in_translator_comments = False + if nested: + funcname = value + elif tok == STRING: + # Unwrap quotes in a safe manner, maintaining the string's + # encoding + # https://sourceforge.net/tracker/?func=detail&atid=355470& + # aid=617979&group_id=5470 + code = compile('# coding=%s\n%s' % (str(encoding), value), + '', 'eval', future_flags) + value = eval(code, {'__builtins__': {}}, {}) + if PY2 and not isinstance(value, text_type): + value = value.decode(encoding) + buf.append(value) + elif tok == OP and value == ',': + if buf: + messages.append(''.join(buf)) + del buf[:] + else: + messages.append(None) + if translator_comments: + # We have translator comments, and since we're on a + # comma(,) user is allowed to break into a new line + # Let's increase the last comment's lineno in order + # for the comment to still be a valid one + old_lineno, old_comment = translator_comments.pop() + 
translator_comments.append((old_lineno + 1, old_comment)) + elif call_stack > 0 and tok == OP and value == ')': + call_stack -= 1 + elif funcname and call_stack == -1: + funcname = None + elif tok == NAME and value in keywords: + funcname = value + + +def extract_javascript(fileobj, keywords, comment_tags, options): + """Extract messages from JavaScript source code. + + :param fileobj: the seekable, file-like object the messages should be + extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + Supported options are: + * `jsx` -- set to false to disable JSX/E4X support. + * `template_string` -- set to false to disable ES6 + template string support. + """ + from babel.messages.jslexer import Token, tokenize, unquote_string + funcname = message_lineno = None + messages = [] + last_argument = None + translator_comments = [] + concatenate_next = False + encoding = options.get('encoding', 'utf-8') + last_token = None + call_stack = -1 + dotted = any('.' in kw for kw in keywords) + + for token in tokenize( + fileobj.read().decode(encoding), + jsx=options.get("jsx", True), + template_string=options.get("template_string", True), + dotted=dotted + ): + if ( # Turn keyword`foo` expressions into keyword("foo") calls: + funcname and # have a keyword... + (last_token and last_token.type == 'name') and # we've seen nothing after the keyword... + token.type == 'template_string' # this is a template string + ): + message_lineno = token.lineno + messages = [unquote_string(token.value)] + call_stack = 0 + token = Token('operator', ')', token.lineno) + + if token.type == 'operator' and token.value == '(': + if funcname: + message_lineno = token.lineno + call_stack += 1 + + elif call_stack == -1 and token.type == 'linecomment': + value = token.value[2:].strip() + if translator_comments and \ + translator_comments[-1][0] == token.lineno - 1: + translator_comments.append((token.lineno, value)) + continue + + for comment_tag in comment_tags: + if value.startswith(comment_tag): + translator_comments.append((token.lineno, value.strip())) + break + + elif token.type == 'multilinecomment': + # only one multi-line comment may preceed a translation + translator_comments = [] + value = token.value[2:-2].strip() + for comment_tag in comment_tags: + if value.startswith(comment_tag): + lines = value.splitlines() + if lines: + lines[0] = lines[0].strip() + lines[1:] = dedent('\n'.join(lines[1:])).splitlines() + for offset, line in enumerate(lines): + translator_comments.append((token.lineno + offset, + line)) + break + + elif funcname and call_stack == 0: + if token.type == 'operator' and token.value == ')': + if last_argument is not None: + messages.append(last_argument) + if len(messages) > 1: + messages = tuple(messages) + elif messages: + messages = messages[0] + else: + messages = None + + # Comments don't apply unless they immediately precede the + # message + if translator_comments and \ + translator_comments[-1][0] < message_lineno - 1: + translator_comments = [] + + if messages is not None: + yield (message_lineno, funcname, messages, + [comment[1] for comment in translator_comments]) + + funcname = message_lineno = last_argument = None + concatenate_next = False + translator_comments = [] + messages = [] + call_stack = -1 + + elif token.type in ('string', 'template_string'): + new_value 
= unquote_string(token.value) + if concatenate_next: + last_argument = (last_argument or '') + new_value + concatenate_next = False + else: + last_argument = new_value + + elif token.type == 'operator': + if token.value == ',': + if last_argument is not None: + messages.append(last_argument) + last_argument = None + else: + messages.append(None) + concatenate_next = False + elif token.value == '+': + concatenate_next = True + + elif call_stack > 0 and token.type == 'operator' \ + and token.value == ')': + call_stack -= 1 + + elif funcname and call_stack == -1: + funcname = None + + elif call_stack == -1 and token.type == 'name' and \ + token.value in keywords and \ + (last_token is None or last_token.type != 'name' or + last_token.value != 'function'): + funcname = token.value + + last_token = token diff --git a/venv/lib/python3.7/site-packages/babel/messages/frontend.py b/venv/lib/python3.7/site-packages/babel/messages/frontend.py new file mode 100644 index 0000000..4756055 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/frontend.py @@ -0,0 +1,1041 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.frontend + ~~~~~~~~~~~~~~~~~~~~~~~ + + Frontends for the message extraction functionality. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" +from __future__ import print_function + +import logging +import optparse +import os +import re +import shutil +import sys +import tempfile +from collections import OrderedDict +from datetime import datetime +from locale import getpreferredencoding + +from babel import __version__ as VERSION +from babel import Locale, localedata +from babel._compat import StringIO, string_types, text_type, PY2 +from babel.core import UnknownLocaleError +from babel.messages.catalog import Catalog +from babel.messages.extract import DEFAULT_KEYWORDS, DEFAULT_MAPPING, check_and_call_extract_file, extract_from_dir +from babel.messages.mofile import write_mo +from babel.messages.pofile import read_po, write_po +from babel.util import LOCALTZ +from distutils import log as distutils_log +from distutils.cmd import Command as _Command +from distutils.errors import DistutilsOptionError, DistutilsSetupError + +try: + from ConfigParser import RawConfigParser +except ImportError: + from configparser import RawConfigParser + + +po_file_read_mode = ('rU' if PY2 else 'r') + + +def listify_value(arg, split=None): + """ + Make a list out of an argument. + + Values from `distutils` argument parsing are always single strings; + values from `optparse` parsing may be lists of strings that may need + to be further split. + + No matter the input, this function returns a flat list of whitespace-trimmed + strings, with `None` values filtered out. + + >>> listify_value("foo bar") + ['foo', 'bar'] + >>> listify_value(["foo bar"]) + ['foo', 'bar'] + >>> listify_value([["foo"], "bar"]) + ['foo', 'bar'] + >>> listify_value([["foo"], ["bar", None, "foo"]]) + ['foo', 'bar', 'foo'] + >>> listify_value("foo, bar, quux", ",") + ['foo', 'bar', 'quux'] + + :param arg: A string or a list of strings + :param split: The argument to pass to `str.split()`. 
+ :return: + """ + out = [] + + if not isinstance(arg, (list, tuple)): + arg = [arg] + + for val in arg: + if val is None: + continue + if isinstance(val, (list, tuple)): + out.extend(listify_value(val, split=split)) + continue + out.extend(s.strip() for s in text_type(val).split(split)) + assert all(isinstance(val, string_types) for val in out) + return out + + +class Command(_Command): + # This class is a small shim between Distutils commands and + # optparse option parsing in the frontend command line. + + #: Option name to be input as `args` on the script command line. + as_args = None + + #: Options which allow multiple values. + #: This is used by the `optparse` transmogrification code. + multiple_value_options = () + + #: Options which are booleans. + #: This is used by the `optparse` transmogrification code. + # (This is actually used by distutils code too, but is never + # declared in the base class.) + boolean_options = () + + #: Option aliases, to retain standalone command compatibility. + #: Distutils does not support option aliases, but optparse does. + #: This maps the distutils argument name to an iterable of aliases + #: that are usable with optparse. + option_aliases = {} + + #: Choices for options that needed to be restricted to specific + #: list of choices. + option_choices = {} + + #: Log object. To allow replacement in the script command line runner. + log = distutils_log + + def __init__(self, dist=None): + # A less strict version of distutils' `__init__`. + self.distribution = dist + self.initialize_options() + self._dry_run = None + self.verbose = False + self.force = None + self.help = 0 + self.finalized = 0 + + +class compile_catalog(Command): + """Catalog compilation command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import compile_catalog + + setup( + ... + cmdclass = {'compile_catalog': compile_catalog} + ) + + .. versionadded:: 0.9 + """ + + description = 'compile message catalogs to binary MO files' + user_options = [ + ('domain=', 'D', + "domains of PO files (space separated list, default 'messages')"), + ('directory=', 'd', + 'path to base directory containing the catalogs'), + ('input-file=', 'i', + 'name of the input file'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.mo')"), + ('locale=', 'l', + 'locale of the catalog to compile'), + ('use-fuzzy', 'f', + 'also include fuzzy translations'), + ('statistics', None, + 'print statistics about translations') + ] + boolean_options = ['use-fuzzy', 'statistics'] + + def initialize_options(self): + self.domain = 'messages' + self.directory = None + self.input_file = None + self.output_file = None + self.locale = None + self.use_fuzzy = False + self.statistics = False + + def finalize_options(self): + self.domain = listify_value(self.domain) + if not self.input_file and not self.directory: + raise DistutilsOptionError('you must specify either the input file ' + 'or the base directory') + if not self.output_file and not self.directory: + raise DistutilsOptionError('you must specify either the output file ' + 'or the base directory') + + def run(self): + n_errors = 0 + for domain in self.domain: + for catalog, errors in self._run_domain(domain).items(): + n_errors += len(errors) + if n_errors: + self.log.error('%d errors encountered.' 
% n_errors) + return (1 if n_errors else 0) + + def _run_domain(self, domain): + po_files = [] + mo_files = [] + + if not self.input_file: + if self.locale: + po_files.append((self.locale, + os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.po'))) + mo_files.append(os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.mo')) + else: + for locale in os.listdir(self.directory): + po_file = os.path.join(self.directory, locale, + 'LC_MESSAGES', domain + '.po') + if os.path.exists(po_file): + po_files.append((locale, po_file)) + mo_files.append(os.path.join(self.directory, locale, + 'LC_MESSAGES', + domain + '.mo')) + else: + po_files.append((self.locale, self.input_file)) + if self.output_file: + mo_files.append(self.output_file) + else: + mo_files.append(os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.mo')) + + if not po_files: + raise DistutilsOptionError('no message catalogs found') + + catalogs_and_errors = {} + + for idx, (locale, po_file) in enumerate(po_files): + mo_file = mo_files[idx] + with open(po_file, 'rb') as infile: + catalog = read_po(infile, locale) + + if self.statistics: + translated = 0 + for message in list(catalog)[1:]: + if message.string: + translated += 1 + percentage = 0 + if len(catalog): + percentage = translated * 100 // len(catalog) + self.log.info( + '%d of %d messages (%d%%) translated in %s', + translated, len(catalog), percentage, po_file + ) + + if catalog.fuzzy and not self.use_fuzzy: + self.log.info('catalog %s is marked as fuzzy, skipping', po_file) + continue + + catalogs_and_errors[catalog] = catalog_errors = list(catalog.check()) + for message, errors in catalog_errors: + for error in errors: + self.log.error( + 'error: %s:%d: %s', po_file, message.lineno, error + ) + + self.log.info('compiling catalog %s to %s', po_file, mo_file) + + with open(mo_file, 'wb') as outfile: + write_mo(outfile, catalog, use_fuzzy=self.use_fuzzy) + + return catalogs_and_errors + + +class extract_messages(Command): + """Message extraction command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import extract_messages + + setup( + ... + cmdclass = {'extract_messages': extract_messages} + ) + """ + + description = 'extract localizable strings from the project code' + user_options = [ + ('charset=', None, + 'charset to use in the output file (default "utf-8")'), + ('keywords=', 'k', + 'space-separated list of keywords to look for in addition to the ' + 'defaults (may be repeated multiple times)'), + ('no-default-keywords', None, + 'do not include the default keywords'), + ('mapping-file=', 'F', + 'path to the mapping configuration file'), + ('no-location', None, + 'do not include location comments with filename and line number'), + ('add-location=', None, + 'location lines format. If it is not given or "full", it generates ' + 'the lines with both file name and line number. If it is "file", ' + 'the line number part is omitted. 
If it is "never", it completely ' + 'suppresses the lines (same as --no-location).'), + ('omit-header', None, + 'do not include msgid "" entry in header'), + ('output-file=', 'o', + 'name of the output file'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ('sort-output', None, + 'generate sorted output (default False)'), + ('sort-by-file', None, + 'sort output by file location (default False)'), + ('msgid-bugs-address=', None, + 'set report address for msgid'), + ('copyright-holder=', None, + 'set copyright holder in output'), + ('project=', None, + 'set project name in output'), + ('version=', None, + 'set project version in output'), + ('add-comments=', 'c', + 'place comment block with TAG (or those preceding keyword lines) in ' + 'output file. Separate multiple TAGs with commas(,)'), # TODO: Support repetition of this argument + ('strip-comments', 's', + 'strip the comment TAGs from the comments.'), + ('input-paths=', None, + 'files or directories that should be scanned for messages. Separate multiple ' + 'files or directories with commas(,)'), # TODO: Support repetition of this argument + ('input-dirs=', None, # TODO (3.x): Remove me. + 'alias for input-paths (does allow files as well as directories).'), + ] + boolean_options = [ + 'no-default-keywords', 'no-location', 'omit-header', 'no-wrap', + 'sort-output', 'sort-by-file', 'strip-comments' + ] + as_args = 'input-paths' + multiple_value_options = ('add-comments', 'keywords') + option_aliases = { + 'keywords': ('--keyword',), + 'mapping-file': ('--mapping',), + 'output-file': ('--output',), + 'strip-comments': ('--strip-comment-tags',), + } + option_choices = { + 'add-location': ('full', 'file', 'never',), + } + + def initialize_options(self): + self.charset = 'utf-8' + self.keywords = None + self.no_default_keywords = False + self.mapping_file = None + self.no_location = False + self.add_location = None + self.omit_header = False + self.output_file = None + self.input_dirs = None + self.input_paths = None + self.width = None + self.no_wrap = False + self.sort_output = False + self.sort_by_file = False + self.msgid_bugs_address = None + self.copyright_holder = None + self.project = None + self.version = None + self.add_comments = None + self.strip_comments = False + self.include_lineno = True + + def finalize_options(self): + if self.input_dirs: + if not self.input_paths: + self.input_paths = self.input_dirs + else: + raise DistutilsOptionError( + 'input-dirs and input-paths are mutually exclusive' + ) + + if self.no_default_keywords: + keywords = {} + else: + keywords = DEFAULT_KEYWORDS.copy() + + keywords.update(parse_keywords(listify_value(self.keywords))) + + self.keywords = keywords + + if not self.keywords: + raise DistutilsOptionError('you must specify new keywords if you ' + 'disable the default ones') + + if not self.output_file: + raise DistutilsOptionError('no output file specified') + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + + if self.sort_output and self.sort_by_file: + raise DistutilsOptionError("'--sort-output' and '--sort-by-file' " + "are mutually exclusive") + + if self.input_paths: + if isinstance(self.input_paths, string_types): + self.input_paths = re.split(r',\s*', self.input_paths) + 
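                # (Illustrative note.) A comma-separated --input-paths value such as
                # "cogs,bot.py, ./configs" is split here into
                # ['cogs', 'bot.py', './configs']; r',\s*' only consumes whitespace
                # *after* each comma, so "cogs , bot.py" would keep the trailing
                # space on 'cogs '.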
elif self.distribution is not None: + self.input_paths = dict.fromkeys([ + k.split('.', 1)[0] + for k in (self.distribution.packages or ()) + ]).keys() + else: + self.input_paths = [] + + if not self.input_paths: + raise DistutilsOptionError("no input files or directories specified") + + for path in self.input_paths: + if not os.path.exists(path): + raise DistutilsOptionError("Input path: %s does not exist" % path) + + self.add_comments = listify_value(self.add_comments or (), ",") + + if self.distribution: + if not self.project: + self.project = self.distribution.get_name() + if not self.version: + self.version = self.distribution.get_version() + + if self.add_location == 'never': + self.no_location = True + elif self.add_location == 'file': + self.include_lineno = False + + def run(self): + mappings = self._get_mappings() + with open(self.output_file, 'wb') as outfile: + catalog = Catalog(project=self.project, + version=self.version, + msgid_bugs_address=self.msgid_bugs_address, + copyright_holder=self.copyright_holder, + charset=self.charset) + + for path, method_map, options_map in mappings: + def callback(filename, method, options): + if method == 'ignore': + return + + # If we explicitly provide a full filepath, just use that. + # Otherwise, path will be the directory path and filename + # is the relative path from that dir to the file. + # So we can join those to get the full filepath. + if os.path.isfile(path): + filepath = path + else: + filepath = os.path.normpath(os.path.join(path, filename)) + + optstr = '' + if options: + optstr = ' (%s)' % ', '.join(['%s="%s"' % (k, v) for + k, v in options.items()]) + self.log.info('extracting messages from %s%s', filepath, optstr) + + if os.path.isfile(path): + current_dir = os.getcwd() + extracted = check_and_call_extract_file( + path, method_map, options_map, + callback, self.keywords, self.add_comments, + self.strip_comments, current_dir + ) + else: + extracted = extract_from_dir( + path, method_map, options_map, + keywords=self.keywords, + comment_tags=self.add_comments, + callback=callback, + strip_comment_tags=self.strip_comments + ) + for filename, lineno, message, comments, context in extracted: + if os.path.isfile(path): + filepath = filename # already normalized + else: + filepath = os.path.normpath(os.path.join(path, filename)) + + catalog.add(message, None, [(filepath, lineno)], + auto_comments=comments, context=context) + + self.log.info('writing PO template file to %s', self.output_file) + write_po(outfile, catalog, width=self.width, + no_location=self.no_location, + omit_header=self.omit_header, + sort_output=self.sort_output, + sort_by_file=self.sort_by_file, + include_lineno=self.include_lineno) + + def _get_mappings(self): + mappings = [] + + if self.mapping_file: + with open(self.mapping_file, po_file_read_mode) as fileobj: + method_map, options_map = parse_mapping(fileobj) + for path in self.input_paths: + mappings.append((path, method_map, options_map)) + + elif getattr(self.distribution, 'message_extractors', None): + message_extractors = self.distribution.message_extractors + for path, mapping in message_extractors.items(): + if isinstance(mapping, string_types): + method_map, options_map = parse_mapping(StringIO(mapping)) + else: + method_map, options_map = [], {} + for pattern, method, options in mapping: + method_map.append((pattern, method)) + options_map[pattern] = options or {} + mappings.append((path, method_map, options_map)) + + else: + for path in self.input_paths: + mappings.append((path, DEFAULT_MAPPING, 
{})) + + return mappings + + +def check_message_extractors(dist, name, value): + """Validate the ``message_extractors`` keyword argument to ``setup()``. + + :param dist: the distutils/setuptools ``Distribution`` object + :param name: the name of the keyword argument (should always be + "message_extractors") + :param value: the value of the keyword argument + :raise `DistutilsSetupError`: if the value is not valid + """ + assert name == 'message_extractors' + if not isinstance(value, dict): + raise DistutilsSetupError('the value of the "message_extractors" ' + 'parameter must be a dictionary') + + +class init_catalog(Command): + """New catalog initialization command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import init_catalog + + setup( + ... + cmdclass = {'init_catalog': init_catalog} + ) + """ + + description = 'create a new catalog based on a POT file' + user_options = [ + ('domain=', 'D', + "domain of PO file (default 'messages')"), + ('input-file=', 'i', + 'name of the input file'), + ('output-dir=', 'd', + 'path to output directory'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.po')"), + ('locale=', 'l', + 'locale for the new localized catalog'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ] + boolean_options = ['no-wrap'] + + def initialize_options(self): + self.output_dir = None + self.output_file = None + self.input_file = None + self.locale = None + self.domain = 'messages' + self.no_wrap = False + self.width = None + + def finalize_options(self): + if not self.input_file: + raise DistutilsOptionError('you must specify the input file') + + if not self.locale: + raise DistutilsOptionError('you must provide a locale for the ' + 'new catalog') + try: + self._locale = Locale.parse(self.locale) + except UnknownLocaleError as e: + raise DistutilsOptionError(e) + + if not self.output_file and not self.output_dir: + raise DistutilsOptionError('you must specify the output directory') + if not self.output_file: + self.output_file = os.path.join(self.output_dir, self.locale, + 'LC_MESSAGES', self.domain + '.po') + + if not os.path.exists(os.path.dirname(self.output_file)): + os.makedirs(os.path.dirname(self.output_file)) + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + + def run(self): + self.log.info( + 'creating catalog %s based on %s', self.output_file, self.input_file + ) + + with open(self.input_file, 'rb') as infile: + # Although reading from the catalog template, read_po must be fed + # the locale in order to correctly calculate plurals + catalog = read_po(infile, locale=self.locale) + + catalog.locale = self._locale + catalog.revision_date = datetime.now(LOCALTZ) + catalog.fuzzy = False + + with open(self.output_file, 'wb') as outfile: + write_po(outfile, catalog, width=self.width) + + +class update_catalog(Command): + """Catalog merging command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. 
For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import update_catalog + + setup( + ... + cmdclass = {'update_catalog': update_catalog} + ) + + .. versionadded:: 0.9 + """ + + description = 'update message catalogs from a POT file' + user_options = [ + ('domain=', 'D', + "domain of PO file (default 'messages')"), + ('input-file=', 'i', + 'name of the input file'), + ('output-dir=', 'd', + 'path to base directory containing the catalogs'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.po')"), + ('omit-header', None, + "do not include msgid "" entry in header"), + ('locale=', 'l', + 'locale of the catalog to compile'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ('ignore-obsolete=', None, + 'whether to omit obsolete messages from the output'), + ('no-fuzzy-matching', 'N', + 'do not use fuzzy matching'), + ('update-header-comment', None, + 'update target header comment'), + ('previous', None, + 'keep previous msgids of translated messages'), + ] + boolean_options = [ + 'omit-header', 'no-wrap', 'ignore-obsolete', 'no-fuzzy-matching', + 'previous', 'update-header-comment', + ] + + def initialize_options(self): + self.domain = 'messages' + self.input_file = None + self.output_dir = None + self.output_file = None + self.omit_header = False + self.locale = None + self.width = None + self.no_wrap = False + self.ignore_obsolete = False + self.no_fuzzy_matching = False + self.update_header_comment = False + self.previous = False + + def finalize_options(self): + if not self.input_file: + raise DistutilsOptionError('you must specify the input file') + if not self.output_file and not self.output_dir: + raise DistutilsOptionError('you must specify the output file or ' + 'directory') + if self.output_file and not self.locale: + raise DistutilsOptionError('you must specify the locale') + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + if self.no_fuzzy_matching and self.previous: + self.previous = False + + def run(self): + po_files = [] + if not self.output_file: + if self.locale: + po_files.append((self.locale, + os.path.join(self.output_dir, self.locale, + 'LC_MESSAGES', + self.domain + '.po'))) + else: + for locale in os.listdir(self.output_dir): + po_file = os.path.join(self.output_dir, locale, + 'LC_MESSAGES', + self.domain + '.po') + if os.path.exists(po_file): + po_files.append((locale, po_file)) + else: + po_files.append((self.locale, self.output_file)) + + if not po_files: + raise DistutilsOptionError('no message catalogs found') + + domain = self.domain + if not domain: + domain = os.path.splitext(os.path.basename(self.input_file))[0] + + with open(self.input_file, 'rb') as infile: + template = read_po(infile) + + for locale, filename in po_files: + self.log.info('updating catalog %s based on %s', filename, self.input_file) + with open(filename, 'rb') as infile: + catalog = read_po(infile, locale=locale, domain=domain) + + catalog.update( + template, self.no_fuzzy_matching, + update_header_comment=self.update_header_comment + ) + + tmpname = os.path.join(os.path.dirname(filename), + tempfile.gettempprefix() + + os.path.basename(filename)) + try: 
+ with open(tmpname, 'wb') as tmpfile: + write_po(tmpfile, catalog, + omit_header=self.omit_header, + ignore_obsolete=self.ignore_obsolete, + include_previous=self.previous, width=self.width) + except: + os.remove(tmpname) + raise + + try: + os.rename(tmpname, filename) + except OSError: + # We're probably on Windows, which doesn't support atomic + # renames, at least not through Python + # If the error is in fact due to a permissions problem, that + # same error is going to be raised from one of the following + # operations + os.remove(filename) + shutil.copy(tmpname, filename) + os.remove(tmpname) + + +class CommandLineInterface(object): + """Command-line interface. + + This class provides a simple command-line interface to the message + extraction and PO file generation functionality. + """ + + usage = '%%prog %s [options] %s' + version = '%%prog %s' % VERSION + commands = { + 'compile': 'compile message catalogs to MO files', + 'extract': 'extract messages from source files and generate a POT file', + 'init': 'create new message catalogs from a POT file', + 'update': 'update existing message catalogs from a POT file' + } + + command_classes = { + 'compile': compile_catalog, + 'extract': extract_messages, + 'init': init_catalog, + 'update': update_catalog, + } + + log = None # Replaced on instance level + + def run(self, argv=None): + """Main entry point of the command-line interface. + + :param argv: list of arguments passed on the command-line + """ + + if argv is None: + argv = sys.argv + + self.parser = optparse.OptionParser(usage=self.usage % ('command', '[args]'), + version=self.version) + self.parser.disable_interspersed_args() + self.parser.print_help = self._help + self.parser.add_option('--list-locales', dest='list_locales', + action='store_true', + help="print all known locales and exit") + self.parser.add_option('-v', '--verbose', action='store_const', + dest='loglevel', const=logging.DEBUG, + help='print as much as possible') + self.parser.add_option('-q', '--quiet', action='store_const', + dest='loglevel', const=logging.ERROR, + help='print as little as possible') + self.parser.set_defaults(list_locales=False, loglevel=logging.INFO) + + options, args = self.parser.parse_args(argv[1:]) + + self._configure_logging(options.loglevel) + if options.list_locales: + identifiers = localedata.locale_identifiers() + longest = max([len(identifier) for identifier in identifiers]) + identifiers.sort() + format = u'%%-%ds %%s' % (longest + 1) + for identifier in identifiers: + locale = Locale.parse(identifier) + output = format % (identifier, locale.english_name) + print(output.encode(sys.stdout.encoding or + getpreferredencoding() or + 'ascii', 'replace')) + return 0 + + if not args: + self.parser.error('no valid command or option passed. ' + 'Try the -h/--help option for more information.') + + cmdname = args[0] + if cmdname not in self.commands: + self.parser.error('unknown command "%s"' % cmdname) + + cmdinst = self._configure_command(cmdname, args[1:]) + return cmdinst.run() + + def _configure_logging(self, loglevel): + self.log = logging.getLogger('babel') + self.log.setLevel(loglevel) + # Don't add a new handler for every instance initialization (#227), this + # would cause duplicated output when the CommandLineInterface as an + # normal Python class. 
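        # (Illustrative sketch.) The CLI class can also be driven
        # programmatically, e.g.:
        #     CommandLineInterface().run(
        #         ['pybabel', 'extract', '-o', 'messages.pot', '.'])
        # which is why an already-registered handler is reused below rather
        # than a new one being added on every call.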
+ if self.log.handlers: + handler = self.log.handlers[0] + else: + handler = logging.StreamHandler() + self.log.addHandler(handler) + handler.setLevel(loglevel) + formatter = logging.Formatter('%(message)s') + handler.setFormatter(formatter) + + def _help(self): + print(self.parser.format_help()) + print("commands:") + longest = max([len(command) for command in self.commands]) + format = " %%-%ds %%s" % max(8, longest + 1) + commands = sorted(self.commands.items()) + for name, description in commands: + print(format % (name, description)) + + def _configure_command(self, cmdname, argv): + """ + :type cmdname: str + :type argv: list[str] + """ + cmdclass = self.command_classes[cmdname] + cmdinst = cmdclass() + if self.log: + cmdinst.log = self.log # Use our logger, not distutils'. + assert isinstance(cmdinst, Command) + cmdinst.initialize_options() + + parser = optparse.OptionParser( + usage=self.usage % (cmdname, ''), + description=self.commands[cmdname] + ) + as_args = getattr(cmdclass, "as_args", ()) + for long, short, help in cmdclass.user_options: + name = long.strip("=") + default = getattr(cmdinst, name.replace('-', '_')) + strs = ["--%s" % name] + if short: + strs.append("-%s" % short) + strs.extend(cmdclass.option_aliases.get(name, ())) + choices = cmdclass.option_choices.get(name, None) + if name == as_args: + parser.usage += "<%s>" % name + elif name in cmdclass.boolean_options: + parser.add_option(*strs, action="store_true", help=help) + elif name in cmdclass.multiple_value_options: + parser.add_option(*strs, action="append", help=help, choices=choices) + else: + parser.add_option(*strs, help=help, default=default, choices=choices) + options, args = parser.parse_args(argv) + + if as_args: + setattr(options, as_args.replace('-', '_'), args) + + for key, value in vars(options).items(): + setattr(cmdinst, key, value) + + try: + cmdinst.ensure_finalized() + except DistutilsOptionError as err: + parser.error(str(err)) + + return cmdinst + + +def main(): + return CommandLineInterface().run(sys.argv) + + +def parse_mapping(fileobj, filename=None): + """Parse an extraction method mapping from a file-like object. + + >>> buf = StringIO(''' + ... [extractors] + ... custom = mypackage.module:myfunc + ... + ... # Python source files + ... [python: **.py] + ... + ... # Genshi templates + ... [genshi: **/templates/**.html] + ... include_attrs = + ... [genshi: **/templates/**.txt] + ... template_class = genshi.template:TextTemplate + ... encoding = latin-1 + ... + ... # Some custom extractor + ... [custom: **/custom/*.*] + ... 
''') + + >>> method_map, options_map = parse_mapping(buf) + >>> len(method_map) + 4 + + >>> method_map[0] + ('**.py', 'python') + >>> options_map['**.py'] + {} + >>> method_map[1] + ('**/templates/**.html', 'genshi') + >>> options_map['**/templates/**.html']['include_attrs'] + '' + >>> method_map[2] + ('**/templates/**.txt', 'genshi') + >>> options_map['**/templates/**.txt']['template_class'] + 'genshi.template:TextTemplate' + >>> options_map['**/templates/**.txt']['encoding'] + 'latin-1' + + >>> method_map[3] + ('**/custom/*.*', 'mypackage.module:myfunc') + >>> options_map['**/custom/*.*'] + {} + + :param fileobj: a readable file-like object containing the configuration + text to parse + :see: `extract_from_directory` + """ + extractors = {} + method_map = [] + options_map = {} + + parser = RawConfigParser() + parser._sections = OrderedDict(parser._sections) # We need ordered sections + + if PY2: + parser.readfp(fileobj, filename) + else: + parser.read_file(fileobj, filename) + + for section in parser.sections(): + if section == 'extractors': + extractors = dict(parser.items(section)) + else: + method, pattern = [part.strip() for part in section.split(':', 1)] + method_map.append((pattern, method)) + options_map[pattern] = dict(parser.items(section)) + + if extractors: + for idx, (pattern, method) in enumerate(method_map): + if method in extractors: + method = extractors[method] + method_map[idx] = (pattern, method) + + return method_map, options_map + + +def parse_keywords(strings=[]): + """Parse keywords specifications from the given list of strings. + + >>> kw = sorted(parse_keywords(['_', 'dgettext:2', 'dngettext:2,3', 'pgettext:1c,2']).items()) + >>> for keyword, indices in kw: + ... print((keyword, indices)) + ('_', None) + ('dgettext', (2,)) + ('dngettext', (2, 3)) + ('pgettext', ((1, 'c'), 2)) + """ + keywords = {} + for string in strings: + if ':' in string: + funcname, indices = string.split(':') + else: + funcname, indices = string, None + if funcname not in keywords: + if indices: + inds = [] + for x in indices.split(','): + if x[-1] == 'c': + inds.append((int(x[:-1]), 'c')) + else: + inds.append(int(x)) + indices = tuple(inds) + keywords[funcname] = indices + return keywords + + +if __name__ == '__main__': + main() diff --git a/venv/lib/python3.7/site-packages/babel/messages/jslexer.py b/venv/lib/python3.7/site-packages/babel/messages/jslexer.py new file mode 100644 index 0000000..ace0b47 --- /dev/null +++ b/venv/lib/python3.7/site-packages/babel/messages/jslexer.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.jslexer + ~~~~~~~~~~~~~~~~~~~~~~ + + A simple JavaScript 1.5 lexer which is used for the JavaScript + extractor. + + :copyright: (c) 2013-2019 by the Babel Team. + :license: BSD, see LICENSE for more details. 
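    A minimal usage sketch (editor's illustration, output shown only roughly)::

        from babel.messages.jslexer import tokenize

        for tok in tokenize("gettext('hi')"):
            print(tok.type, tok.value)
        # roughly: name gettext / operator ( / string 'hi' / operator )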
+""" +from collections import namedtuple +import re +from babel._compat import unichr + +operators = sorted([ + '+', '-', '*', '%', '!=', '==', '<', '>', '<=', '>=', '=', + '+=', '-=', '*=', '%=', '<<', '>>', '>>>', '<<=', '>>=', + '>>>=', '&', '&=', '|', '|=', '&&', '||', '^', '^=', '(', ')', + '[', ']', '{', '}', '!', '--', '++', '~', ',', ';', '.', ':' +], key=len, reverse=True) + +escapes = {'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t'} + +name_re = re.compile(r'[\w$_][\w\d$_]*', re.UNICODE) +dotted_name_re = re.compile(r'[\w$_][\w\d$_.]*[\w\d$_.]', re.UNICODE) +division_re = re.compile(r'/=?') +regex_re = re.compile(r'/(?:[^/\\]*(?:\\.[^/\\]*)*)/[a-zA-Z]*', re.DOTALL) +line_re = re.compile(r'(\r\n|\n|\r)') +line_join_re = re.compile(r'\\' + line_re.pattern) +uni_escape_re = re.compile(r'[a-fA-F0-9]{1,4}') + +Token = namedtuple('Token', 'type value lineno') + +_rules = [ + (None, re.compile(r'\s+', re.UNICODE)), + (None, re.compile(r' 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 
888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 
1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 
4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 
963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/venv/lib/python3.7/site-packages/chardet/big5prober.py b/venv/lib/python3.7/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/venv/lib/python3.7/site-packages/chardet/chardistribution.py b/venv/lib/python3.7/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. + self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. 
+ return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. 
State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py b/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/venv/lib/python3.7/site-packages/chardet/charsetprober.py b/venv/lib/python3.7/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. + """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... 
+ if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/venv/lib/python3.7/site-packages/chardet/cli/__init__.py b/venv/lib/python3.7/site-packages/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py b/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/venv/lib/python3.7/site-packages/chardet/codingstatemachine.py b/venv/lib/python3.7/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .enums import MachineState
+
+
+class CodingStateMachine(object):
+    """
+    A state machine to verify a byte sequence for a particular encoding. For
+    each byte the detector receives, it will feed that byte to every active
+    state machine available, one byte at a time. The state machine changes its
+    state based on its previous state and the byte it receives. There are 3
+    states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or a legal byte sequence
+                 (i.e. a valid code point) for a character has been identified.
+
+    ME state:  This indicates that the state machine identified a byte sequence
+               that is specific to the charset it is designed for and that
+               there is no other possible encoding which can contain this byte
+               sequence. This will lead to an immediate positive answer for
+               the detector.
+
+    ERROR state: This indicates the state machine identified an illegal byte
+                 sequence for that encoding. This will lead to an immediate
+                 negative answer for this encoding. The detector will exclude
+                 this encoding from consideration from here on.
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/venv/lib/python3.7/site-packages/chardet/compat.py b/venv/lib/python3.7/site-packages/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/venv/lib/python3.7/site-packages/chardet/cp949prober.py b/venv/lib/python3.7/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/venv/lib/python3.7/site-packages/chardet/enums.py b/venv/lib/python3.7/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/venv/lib/python3.7/site-packages/chardet/escprober.py b/venv/lib/python3.7/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/venv/lib/python3.7/site-packages/chardet/escsm.py b/venv/lib/python3.7/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f 
+2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/venv/lib/python3.7/site-packages/chardet/eucjpprober.py b/venv/lib/python3.7/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. 
All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/venv/lib/python3.7/site-packages/chardet/euckrfreq.py b/venv/lib/python3.7/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 
908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 
608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/venv/lib/python3.7/site-packages/chardet/euckrprober.py b/venv/lib/python3.7/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/venv/lib/python3.7/site-packages/chardet/euctwfreq.py b/venv/lib/python3.7/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/venv/lib/python3.7/site-packages/chardet/euctwprober.py b/venv/lib/python3.7/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/venv/lib/python3.7/site-packages/chardet/gb2312freq.py b/venv/lib/python3.7/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/venv/lib/python3.7/site-packages/chardet/gb2312prober.py b/venv/lib/python3.7/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/venv/lib/python3.7/site-packages/chardet/hebrewprober.py b/venv/lib/python3.7/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (In fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+# specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+# that contains special punctuation marks or diacritics is displayed with
+# some unconverted characters showing as question marks. This problem might
+# be corrected using another model prober for x-mac-hebrew. Due to the fact
+# that x-mac-hebrew texts are so rare, writing another model prober isn't
+# worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision between
+# the two is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber is not using any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew.
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+        if (self._logical_prober.state == ProbingState.NOT_ME) and \
+                (self._visual_prober.state == ProbingState.NOT_ME):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
diff --git a/venv/lib/python3.7/site-packages/chardet/jisfreq.py b/venv/lib/python3.7/site-packages/chardet/jisfreq.py
new file mode 100644
index 0000000..83fc082
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer technology
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP
+# They are sorted in order of frequency.
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/venv/lib/python3.7/site-packages/chardet/jpcntx.py b/venv/lib/python3.7/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK 
######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py b/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + +Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/venv/lib/python3.7/site-packages/chardet/langcyrillicmodel.py b/venv/lib/python3.7/site-packages/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py b/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py b/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git 
a/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py b/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git a/venv/lib/python3.7/site-packages/chardet/langthaimodel.py b/venv/lib/python3.7/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py b/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, +3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 
+2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/venv/lib/python3.7/site-packages/chardet/latin1prober.py b/venv/lib/python3.7/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == 
ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/venv/lib/python3.7/site-packages/chardet/mbcharsetprober.py b/venv/lib/python3.7/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + 
return self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/venv/lib/python3.7/site-packages/chardet/mbcsgroupprober.py b/venv/lib/python3.7/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/venv/lib/python3.7/site-packages/chardet/mbcssm.py b/venv/lib/python3.7/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py b/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py b/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/venv/lib/python3.7/site-packages/chardet/sjisprober.py b/venv/lib/python3.7/site-packages/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/venv/lib/python3.7/site-packages/chardet/universaldetector.py b/venv/lib/python3.7/site-packages/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
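As a point of reference for the prober API added above, here is a minimal sketch that drives SJISProber on its own; it assumes chardet 3.0.4 is importable, the sample text is arbitrary, and a short input may not accumulate enough evidence for a high confidence::

    # Illustrative sketch (not from the vendored chardet sources): feed
    # Shift_JIS bytes to a single multi-byte prober and inspect its report.
    from chardet.sjisprober import SJISProber
    from chardet.enums import ProbingState

    prober = SJISProber()
    data = "これは日本語のサンプルテキストです。".encode("shift_jis")

    state = prober.feed(data)              # one of the ProbingState values
    print(state == ProbingState.FOUND_IT)
    print(prober.charset_name)             # reported by the context analyzer
    print(prober.get_confidence())         # max of context / distribution confidence

The same feed/state/confidence cycle applies to the other MultiByteCharSetProber subclasses; the UniversalDetector below simply runs many such probers side by side and keeps the best answer.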
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff --git a/venv/lib/python3.7/site-packages/chardet/utf8prober.py b/venv/lib/python3.7/site-packages/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/venv/lib/python3.7/site-packages/chardet/version.py b/venv/lib/python3.7/site-packages/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/venv/lib/python3.7/site-packages/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/LICENSE b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/LICENSE new file mode 100644 index 0000000..5e795a6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/LICENSE @@ -0,0 +1,7 @@ +Copyright Jason R. 
Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/METADATA b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/METADATA new file mode 100644 index 0000000..e805cc9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/METADATA @@ -0,0 +1,259 @@ +Metadata-Version: 2.1 +Name: configparser +Version: 4.0.2 +Summary: Updated configparser from Python 3.7 for Python 2.6+. +Home-page: https://github.com/jaraco/configparser/ +Author: Łukasz Langa +Author-email: lukasz@langa.pl +Maintainer: Jason R. Coombs +Maintainer-email: jaraco@jaraco.com +License: UNKNOWN +Keywords: configparser ini parsing conf cfg configuration file +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=2.6 +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=1.2) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-black-multipy ; extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/configparser.svg + :target: https://pypi.org/project/configparser + +.. image:: https://img.shields.io/pypi/pyversions/configparser.svg + +.. image:: https://img.shields.io/travis/jaraco/configparser/master.svg + :target: https://travis-ci.org/jaraco/configparser + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + :alt: Code style: Black + +.. .. image:: https://img.shields.io/appveyor/ci/jaraco/configparser/master.svg +.. :target: https://ci.appveyor.com/project/jaraco/configparser/branch/master + +.. image:: https://readthedocs.org/projects/configparser/badge/?version=latest + :target: https://configparser.readthedocs.io/en/latest/?badge=latest + +.. image:: https://tidelift.com/badges/package/pypi/configparser + :target: https://tidelift.com/subscription/pkg/pypi-configparser?utm_source=pypi-configparser&utm_medium=readme + + +The ancient ``ConfigParser`` module available in the standard library 2.x has +seen a major update in Python 3.2. 
This is a backport of those changes so that +they can be used directly in Python 2.6 - 3.5. + +To use the ``configparser`` backport instead of the built-in version on both +Python 2 and Python 3, simply import it explicitly as a backport:: + + from backports import configparser + +If you'd like to use the backport on Python 2 and the built-in version on +Python 3, use that invocation instead:: + + import configparser + +For detailed documentation consult the vanilla version at +http://docs.python.org/3/library/configparser.html. + +Why you'll love ``configparser`` +-------------------------------- + +Whereas almost completely compatible with its older brother, ``configparser`` +sports a bunch of interesting new features: + +* full mapping protocol access (`more info + `_):: + + >>> parser = ConfigParser() + >>> parser.read_string(""" + [DEFAULT] + location = upper left + visible = yes + editable = no + color = blue + + [main] + title = Main Menu + color = green + + [options] + title = Options + """) + >>> parser['main']['color'] + 'green' + >>> parser['main']['editable'] + 'no' + >>> section = parser['options'] + >>> section['title'] + 'Options' + >>> section['title'] = 'Options (editable: %(editable)s)' + >>> section['title'] + 'Options (editable: no)' + +* there's now one default ``ConfigParser`` class, which basically is the old + ``SafeConfigParser`` with a bunch of tweaks which make it more predictable for + users. Don't need interpolation? Simply use + ``ConfigParser(interpolation=None)``, no need to use a distinct + ``RawConfigParser`` anymore. + +* the parser is highly `customizable upon instantiation + `__ + supporting things like changing option delimiters, comment characters, the + name of the DEFAULT section, the interpolation syntax, etc. + +* you can easily create your own interpolation syntax but there are two powerful + implementations built-in (`more info + `__): + + * the classic ``%(string-like)s`` syntax (called ``BasicInterpolation``) + + * a new ``${buildout:like}`` syntax (called ``ExtendedInterpolation``) + +* fallback values may be specified in getters (`more info + `__):: + + >>> config.get('closet', 'monster', + ... fallback='No such things as monsters') + 'No such things as monsters' + +* ``ConfigParser`` objects can now read data directly `from strings + `__ + and `from dictionaries + `__. + That means importing configuration from JSON or specifying default values for + the whole configuration (multiple sections) is now a single line of code. Same + goes for copying data from another ``ConfigParser`` instance, thanks to its + mapping protocol support. + +* many smaller tweaks, updates and fixes + +A few words about Unicode +------------------------- + +``configparser`` comes from Python 3 and as such it works well with Unicode. +The library is generally cleaned up in terms of internal data storage and +reading/writing files. There are a couple of incompatibilities with the old +``ConfigParser`` due to that. However, the work required to migrate is well +worth it as it shows the issues that would likely come up during migration of +your project to Python 3. + +The design assumes that Unicode strings are used whenever possible [1]_. That +gives you the certainty that what's stored in a configuration object is text. +Once your configuration is read, the rest of your application doesn't have to +deal with encoding issues. All you have is text [2]_. The only two phases when +you should explicitly state encoding is when you either read from an external +source (e.g. 
a file) or write back. + +Versioning +---------- + +This project uses `semver `_ to +communicate the impact of various releases while periodically syncing +with the upstream implementation in CPython. +`The changelog `_ +serves as a reference indicating which versions incorporate +which upstream functionality. + +Prior to the ``4.0.0`` release, `another scheme +`_ +was used to associate the CPython and backports releases. + +Maintenance +----------- + +This backport was originally authored by Łukasz Langa, the current vanilla +``configparser`` maintainer for CPython and is currently maintained by +Jason R. Coombs: + +* `configparser repository `_ + +* `configparser issue tracker `_ + +Security Contact +---------------- + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. + +Conversion Process +------------------ + +This section is technical and should bother you only if you are wondering how +this backport is produced. If the implementation details of this backport are +not important for you, feel free to ignore the following content. + +``configparser`` is converted using `python-future +`_. The project takes the following +branching approach: + +* the ``3.x`` branch holds unchanged files synchronized from the upstream + CPython repository. The synchronization is currently done by manually copying + the required files and stating from which CPython changeset they come from. + +* the ``master`` branch holds a version of the ``3.x`` code with some tweaks + that make it independent from libraries and constructions unavailable on 2.x. + Code on this branch still *must* work on the corresponding Python 3.x but + will also work on Python 2.6 and 2.7 (including PyPy). You can check this + running the supplied unit tests with ``tox``. + +The process works like this: + +1. In the ``3.x`` branch, run ``pip-run -- sync-upstream.py``, which + downloads the latest stable release of Python and copies the relevant + files from there into their new locations here and then commits those + changes with a nice reference to the relevant upstream commit hash. + +2. I check for new names in ``__all__`` and update imports in + ``configparser.py`` accordingly. I run the tests on Python 3. Commit. + +3. I merge the new commit to ``master``. I run ``tox``. Commit. + +4. If there are necessary changes, I do them now (on ``master``). Note that + the changes should be written in the syntax subset supported by Python + 2.6. + +5. I run ``tox``. If it works, I update the docs and release the new version. + Otherwise, I go back to point 3. I might use ``pasteurize`` to suggest me + required changes but usually I do them manually to keep resulting code in + a nicer form. + + +Footnotes +--------- + +.. [1] To somewhat ease migration, passing bytestrings is still supported but + they are converted to Unicode for internal storage anyway. This means + that for the vast majority of strings used in configuration files, it + won't matter if you pass them as bytestrings or Unicode. However, if you + pass a bytestring that cannot be converted to Unicode using the naive + ASCII codec, a ``UnicodeDecodeError`` will be raised. This is purposeful + and helps you manage proper encoding for all content you store in + memory, read from various sources and write back. + +.. [2] Life gets much easier when you understand that you basically manage + **text** in your application. You don't care about bytes but about + letters. 
In that regard the concept of content encoding is meaningless. + The only time when you deal with raw bytes is when you write the data to + a file. Then you have to specify how your text should be encoded. On + the other end, to get meaningful text from a file, the application + reading it has to know which encoding was used during its creation. But + once the bytes are read and properly decoded, all you have is text. This + is especially powerful when you start interacting with multiple data + sources. Even if each of them uses a different encoding, inside your + application data is held in abstract text form. You can program your + business logic without worrying about which data came from which source. + You can freely exchange the data you store between sources. Only + reading/writing files requires encoding your text to bytes. + + diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/RECORD b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/RECORD new file mode 100644 index 0000000..12729cc --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/RECORD @@ -0,0 +1,14 @@ +__pycache__/configparser.cpython-37.pyc,, +backports/__init__.py,sha256=elt6uFwbaEv80X8iGWsCJ_w_n_h1X8repgOoNrN0Syg,212 +backports/__pycache__/__init__.cpython-37.pyc,, +backports/configparser/__init__.py,sha256=thhQqB1qWNKf-F3CpZFYsjC8YT-_I_vF0w4JiuQfiWI,56628 +backports/configparser/__pycache__/__init__.cpython-37.pyc,, +backports/configparser/__pycache__/helpers.cpython-37.pyc,, +backports/configparser/helpers.py,sha256=TxT00ldsHvIciQpml1YaoHfdtTl033Km6ywwT-U2nRc,7543 +configparser-4.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +configparser-4.0.2.dist-info/LICENSE,sha256=pV4v_ptEmY5iHVHYwJS-0JrMS1I27nPX3zlaM7o8GP0,1050 +configparser-4.0.2.dist-info/METADATA,sha256=oDqeXQXq8JhFEDHKDOBmbUtXmQ3CiiXB1Mr4UheTZ8Y,10910 +configparser-4.0.2.dist-info/RECORD,, +configparser-4.0.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +configparser-4.0.2.dist-info/top_level.txt,sha256=mIs8gajd7cvEWhVluv4u6ocaHw_TJ9rOrpkZEFv-7Hc,23 +configparser.py,sha256=4VADEswCwzy_RDVgvje3BmZhD6iwo3k4EkUZcgzLD4M,1546 diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/WHEEL b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..a6cb03a --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser-4.0.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +backports +configparser diff --git a/venv/lib/python3.7/site-packages/configparser.py b/venv/lib/python3.7/site-packages/configparser.py new file mode 100644 index 0000000..0a18360 --- /dev/null +++ b/venv/lib/python3.7/site-packages/configparser.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Convenience module importing everything from backports.configparser.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from backports.configparser import ( + 
RawConfigParser, + ConfigParser, + SafeConfigParser, + SectionProxy, + Interpolation, + BasicInterpolation, + ExtendedInterpolation, + LegacyInterpolation, + NoSectionError, + DuplicateSectionError, + DuplicateOptionError, + NoOptionError, + InterpolationError, + InterpolationMissingOptionError, + InterpolationSyntaxError, + InterpolationDepthError, + ParsingError, + MissingSectionHeaderError, + ConverterMapping, + DEFAULTSECT, + MAX_INTERPOLATION_DEPTH, +) + +from backports.configparser import Error, _UNSET, _default_dict, _ChainMap # noqa: F401 + +__all__ = [ + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "SafeConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", +] + +# NOTE: names missing from __all__ imported anyway for backwards compatibility. diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/LICENSE b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/LICENSE new file mode 100644 index 0000000..4003396 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
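The vendored ``configparser.py`` shim above simply re-exports the backport from ``backports.configparser``, so ``import configparser`` resolves to the same standard-library style API on both Python 2 and Python 3. A minimal usage sketch of that API (illustrative only, not part of this diff; the section and option names here are invented)::

    import configparser

    # Build a parser and load configuration from a plain dict, one of the
    # input sources the backport's README above describes.
    parser = configparser.ConfigParser()
    parser.read_dict({"general": {"name": "example"}})

    # Mapping-protocol access and keyword-only fallbacks behave the same
    # way whether the stdlib module or this backport is imported.
    print(parser["general"]["name"])                        # -> 'example'
    print(parser.get("general", "missing", fallback=None))  # -> None
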
diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/METADATA b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/METADATA new file mode 100644 index 0000000..7ea276d --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/METADATA @@ -0,0 +1,149 @@ +Metadata-Version: 2.1 +Name: discord.py +Version: 1.2.5 +Summary: A python wrapper for the Discord API +Home-page: https://github.com/Rapptz/discord.py +Author: Rapptz +License: MIT +Project-URL: Documentation, https://discordpy.readthedocs.io/en/latest/ +Project-URL: Issue tracker, https://github.com/Rapptz/discord.py/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Python: >=3.5.3 +Description-Content-Type: text/x-rst +Requires-Dist: aiohttp (<3.6.0,>=3.3.0) +Requires-Dist: websockets (<7.0,>=6.0) +Provides-Extra: docs +Requires-Dist: sphinx (==1.7.4) ; extra == 'docs' +Requires-Dist: sphinxcontrib-asyncio ; extra == 'docs' +Requires-Dist: sphinxcontrib-websupport ; extra == 'docs' +Provides-Extra: voice +Requires-Dist: PyNaCl (==1.3.0) ; extra == 'voice' + +discord.py +========== + +.. image:: https://discordapp.com/api/guilds/336642139381301249/embed.png + :target: https://discord.gg/r3sSKJJ + :alt: Discord server invite +.. image:: https://img.shields.io/pypi/v/discord.py.svg + :target: https://pypi.python.org/pypi/discord.py + :alt: PyPI version info +.. image:: https://img.shields.io/pypi/pyversions/discord.py.svg + :target: https://pypi.python.org/pypi/discord.py + :alt: PyPI supported Python versions + +A modern, easy to use, feature-rich, and async ready API wrapper for Discord written in Python. + +Key Features +------------- + +- Modern Pythonic API using ``async`` and ``await``. +- Proper rate limit handling. +- 100% coverage of the supported Discord API. +- Optimised in both speed and memory. + +Installing +---------- + +**Python 3.5.3 or higher is required** + +To install the library without full voice support, you can just run the following command: + +.. code:: sh + + # Linux/OS X + python3 -m pip install -U discord.py + + # Windows + py -3 -m pip install -U discord.py + +Otherwise to get voice support you should run the following command: + +.. code:: sh + + # Linux/OS X + python3 -m pip install -U discord.py[voice] + + # Windows + py -3 -m pip install -U discord.py[voice] + + +To install the development version, do the following: + +.. code:: sh + + $ git clone https://github.com/Rapptz/discord.py + $ cd discord.py + $ python3 -m pip install -U .[voice] + + +Optional Packages +~~~~~~~~~~~~~~~~~~ + +* PyNaCl (for voice support) + +Please note that on Linux installing voice you must install the following packages via your favourite package manager (e.g. ``apt``, ``yum``, etc) before running the above commands: + +* libffi-dev (or ``libffi-devel`` on some systems) +* python-dev (e.g. ``python3.6-dev`` for Python 3.6) + +Quick Example +-------------- + +.. 
code:: py + + import discord + + class MyClient(discord.Client): + async def on_ready(self): + print('Logged on as', self.user) + + async def on_message(self, message): + # don't respond to ourselves + if message.author == self.user: + return + + if message.content == 'ping': + await message.channel.send('pong') + + client = MyClient() + client.run('token') + +Bot Example +~~~~~~~~~~~~~ + +.. code:: py + + import discord + from discord.ext import commands + + bot = commands.Bot(command_prefix='>') + + @bot.command() + async def ping(ctx): + await ctx.send('pong') + + bot.run('token') + +You can find more examples in the examples directory. + +Links +------ + +- `Documentation `_ +- `Official Discord Server `_ +- `Discord API `_ + + diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/RECORD b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/RECORD new file mode 100644 index 0000000..1c53fd0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/RECORD @@ -0,0 +1,115 @@ +discord.py-1.2.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +discord.py-1.2.5.dist-info/LICENSE,sha256=aO-NCRCqWZoqyWgZscava6GuYviQuKu9tYsDtayNl40,1078 +discord.py-1.2.5.dist-info/METADATA,sha256=ZEf5qzw-Wb7mBBOdWGJ5UNfCJn6ecpy-415cITo9YS4,4073 +discord.py-1.2.5.dist-info/RECORD,, +discord.py-1.2.5.dist-info/WHEEL,sha256=v8slff5hmCpvciQ3G55d2d1CnOBupjDFJHDE2dUb1Ao,97 +discord.py-1.2.5.dist-info/authors.sh,sha256=voAGwpshKMpc51nQU3Zst1sKCUJvtjhjZCo6MKAp9kU,203 +discord.py-1.2.5.dist-info/top_level.txt,sha256=fJkrNbR-_8ubMBUcDEJBcfkpECrvSEmMrNKgvLlQFoM,8 +discord/__init__.py,sha256=E9o0Mh1OzB5XLTN5djqR2RRVz18vRTjZlUzAou6r0To,1803 +discord/__main__.py,sha256=r-aBB756bSS9ZA_hXOnEp8JWOp59sjQgFHf4P4k42Bk,9974 +discord/__pycache__/__init__.cpython-37.pyc,, +discord/__pycache__/__main__.cpython-37.pyc,, +discord/__pycache__/abc.cpython-37.pyc,, +discord/__pycache__/activity.cpython-37.pyc,, +discord/__pycache__/appinfo.cpython-37.pyc,, +discord/__pycache__/asset.cpython-37.pyc,, +discord/__pycache__/audit_logs.cpython-37.pyc,, +discord/__pycache__/backoff.cpython-37.pyc,, +discord/__pycache__/calls.cpython-37.pyc,, +discord/__pycache__/channel.cpython-37.pyc,, +discord/__pycache__/client.cpython-37.pyc,, +discord/__pycache__/colour.cpython-37.pyc,, +discord/__pycache__/context_managers.cpython-37.pyc,, +discord/__pycache__/embeds.cpython-37.pyc,, +discord/__pycache__/emoji.cpython-37.pyc,, +discord/__pycache__/enums.cpython-37.pyc,, +discord/__pycache__/errors.cpython-37.pyc,, +discord/__pycache__/file.cpython-37.pyc,, +discord/__pycache__/gateway.cpython-37.pyc,, +discord/__pycache__/guild.cpython-37.pyc,, +discord/__pycache__/http.cpython-37.pyc,, +discord/__pycache__/invite.cpython-37.pyc,, +discord/__pycache__/iterators.cpython-37.pyc,, +discord/__pycache__/member.cpython-37.pyc,, +discord/__pycache__/message.cpython-37.pyc,, +discord/__pycache__/mixins.cpython-37.pyc,, +discord/__pycache__/object.cpython-37.pyc,, +discord/__pycache__/opus.cpython-37.pyc,, +discord/__pycache__/permissions.cpython-37.pyc,, +discord/__pycache__/player.cpython-37.pyc,, +discord/__pycache__/raw_models.cpython-37.pyc,, +discord/__pycache__/reaction.cpython-37.pyc,, +discord/__pycache__/relationship.cpython-37.pyc,, +discord/__pycache__/role.cpython-37.pyc,, +discord/__pycache__/shard.cpython-37.pyc,, +discord/__pycache__/state.cpython-37.pyc,, +discord/__pycache__/user.cpython-37.pyc,, +discord/__pycache__/utils.cpython-37.pyc,, 
+discord/__pycache__/voice_client.cpython-37.pyc,, +discord/__pycache__/webhook.cpython-37.pyc,, +discord/__pycache__/widget.cpython-37.pyc,, +discord/abc.py,sha256=pE_mXbKVpdPeNifcD9TpsnyEfaKlGJAPSfBvudH3BGA,36799 +discord/activity.py,sha256=HdCE_iGfAoas4MVhshthwHKKvR1RzVZ0kQz5qhMmGTQ,18512 +discord/appinfo.py,sha256=t689zhlryO0_KasXBKTX4dzfRCjVPUFXrlIleThoXSQ,2920 +discord/asset.py,sha256=aw6_yNFczHAWj_JZgxkGWwaXHEC014t6gWIewhuUnyw,7453 +discord/audit_logs.py,sha256=d3cBe0htqrp_2IeO97oBcJWHSx_X4-iopqY0wXzlF_8,12503 +discord/backoff.py,sha256=hNx46IfFCcRQCAPTJuyVvNheEMOVvWnM7QhGL1fk2gE,3166 +discord/bin/libopus-0.x64.dll,sha256=zqIypk0pRCwXafYPCDVbDJsWPjNb-Emf8I2YbWsN790,423142 +discord/bin/libopus-0.x86.dll,sha256=3T1ErXk2NKZC13CzwfovFOmdSXtkwpoVRL3j8FykAlM,380885 +discord/calls.py,sha256=kMEqrLncfk2vgW4zsdl6Kk-D4GWo-0rPP6Y45ncGilE,5292 +discord/channel.py,sha256=kzl2clnihjfChUdB_UYBvHz3KqQj157pskZzulwa4fQ,38590 +discord/client.py,sha256=eSdOjxc5HeRdFfOrbDx0RUFXRNJhvu6UBkg7pFvXmAE,42093 +discord/colour.py,sha256=O_JNNvTsYb_XsNH2-7zZba2-77MZCVEDs36_dqAAHSM,7096 +discord/context_managers.py,sha256=Qjl_cC9AA-aB3r4PSUZ8DCb2U8_GsgHmdO5BPJmtBcI,2281 +discord/embeds.py,sha256=l_4Y1i_HeWSu2HDEpOdcGFyTQkjlnUA_vLOBvG_LP4U,16284 +discord/emoji.py,sha256=t_G_QEqzoojFAh_OvwSPKLawBgv6O1DzECQJpnj8NSM,9429 +discord/enums.py,sha256=S82QWL-yZvaZ0VMp7XS31-Vg_QG4t_vsfc0FWn8ST9Q,11634 +discord/errors.py,sha256=PmW4AAvohZ_5Ip8KUcQuYJtGRaFBQWDVPX4hpJAmzCM,5622 +discord/ext/commands/__init__.py,sha256=v-Snowaq6t6MaQZWeZLfdtv_Yb70gWucqovO56ocYHM,444 +discord/ext/commands/__pycache__/__init__.cpython-37.pyc,, +discord/ext/commands/__pycache__/_types.cpython-37.pyc,, +discord/ext/commands/__pycache__/bot.cpython-37.pyc,, +discord/ext/commands/__pycache__/cog.cpython-37.pyc,, +discord/ext/commands/__pycache__/context.cpython-37.pyc,, +discord/ext/commands/__pycache__/converter.cpython-37.pyc,, +discord/ext/commands/__pycache__/cooldowns.cpython-37.pyc,, +discord/ext/commands/__pycache__/core.cpython-37.pyc,, +discord/ext/commands/__pycache__/errors.cpython-37.pyc,, +discord/ext/commands/__pycache__/help.cpython-37.pyc,, +discord/ext/commands/__pycache__/view.cpython-37.pyc,, +discord/ext/commands/_types.py,sha256=kB-k22jMHnDjwXOBqswR-JNZDvIpNbAqZj9cvGOeuFo,1287 +discord/ext/commands/bot.py,sha256=02EVLhH-hAVc73jsUB3BjVH4n7cm5TSQTaQVVaosY6M,32726 +discord/ext/commands/cog.py,sha256=BXl9TOd1ohjBGUs6ydWDmHKFmE0S5QgpvXFdpdPMYHM,14845 +discord/ext/commands/context.py,sha256=xz6-exg8s1_x4FqXEXmQh-FnHhiU1XnA9CYhhfd_Qjs,10707 +discord/ext/commands/converter.py,sha256=YSw7o7X91F_SijJ6wSvxkc9BEZ2KTJzuL8HGKD-4tpg,18754 +discord/ext/commands/cooldowns.py,sha256=fpBZxEKuiVsfpXx8WYQ-dkFD6hw-xwT-iFjgj9zS4GU,5021 +discord/ext/commands/core.py,sha256=-NBRsBql8ZBMI-4ujKvt7qTwN-8I1CMKGntvxXV7hxs,56156 +discord/ext/commands/errors.py,sha256=0F2e1LYbkprlwdw2w3ghlbeTmEPJreLo-7KucxJjlOU,17398 +discord/ext/commands/help.py,sha256=_vPNk5-Mf6PH5XUCzwzOABWvpewQ4qDzJ1RLnURI5mA,45947 +discord/ext/commands/view.py,sha256=4SH8PwGvBYqmIUdUnmBTJHZ3OkBZQyF0cbFeMuTz_oA,5996 +discord/ext/tasks/__init__.py,sha256=x5T90Pb5lP6Qn-RExom9cE9ddKi9KaFf5l6EEO8IiNg,12337 +discord/ext/tasks/__pycache__/__init__.cpython-37.pyc,, +discord/file.py,sha256=iVxLMG84VtyrgEk8K63aETuIWaWU1f7Ae0h0wFcamlU,3842 +discord/gateway.py,sha256=10Ha-XI7v2X6tsyOjpTS223har6-9nypSLr_BZARo2E,25343 +discord/guild.py,sha256=4178du-ac4avFQDJZz-VQsi9fAtzn_7OEkmYTytJHPs,62023 +discord/http.py,sha256=Wuir-So6m24xwKHtjkD6TaPF37P2kNScwBUDCH2tWa8,32948 
+discord/invite.py,sha256=GsG76f2zRvt9YNG_gW1R_4DLCnLo1tx2CZ6_NAqy_mo,12581 +discord/iterators.py,sha256=cSqXSAp2Pq8IXPTnkcBx1LGYxX6j8iSbtuPRuHlJONw,20754 +discord/member.py,sha256=mv8aj5K05Qz4MzvvZmm4Rq_yjhJx8CF9xsZARmovjkI,22912 +discord/message.py,sha256=K5CUbHH0QYlNV0z5-rXB-X0LtrvohyY3laFqWd1cJsI,34697 +discord/mixins.py,sha256=G21eW4bJCoZeo54M4_hE3DSN0G-tfS_jEI-xmhqTzTY,1505 +discord/object.py,sha256=Cd8pUMFqa3nkBFABxa9PCJ0J1gJA5Y3j9MjkakYDW7E,2469 +discord/opus.py,sha256=seJYtXMrTukXjI_bLEA7UPGhRM1cir1aT7YTc1o4M6k,8337 +discord/permissions.py,sha256=iRJGdJ7H_m2I7AymakOSPsW-_2p7paeos1Q69Ywyq6I,20521 +discord/player.py,sha256=SR88BWWFekq-jqxE5Z99RuEf-thPS6LwW2uluaUoqTI,10919 +discord/raw_models.py,sha256=Wi0tfqAB07lFHebGoeGfCJnCN6A5z_xM17914XuS0YY,5623 +discord/reaction.py,sha256=NSHnva7xRdL9tiY894JEYfLSgKeN4-Z5Fp-ZRWjl0KU,5773 +discord/relationship.py,sha256=9HS6dlDZFppyyPkaC-j0aHdk_NLa__4AGfqWgZM_q2U,2431 +discord/role.py,sha256=K-pzDj3KPlTf2fLBUW9kanz8xZNH2dFxtSkbtEvW40Y,9096 +discord/shard.py,sha256=yA2mwPX3i3T0u7reRCIVYvJWWsQn_2I3P63Te49e_Zw,13526 +discord/state.py,sha256=pYKHxanAIDCjV9UM5m6NkGR8o6feU0NeIMwHzB1A-co,38351 +discord/user.py,sha256=Uzcu5h1JNzWtjGsKRusKHQG0TVxAYNgUPEw1fS0Ezdk,27081 +discord/utils.py,sha256=PF7mVTcOvY4VMvOX426YearCWVLKZJYFEvAyvOmU8Ew,14715 +discord/voice_client.py,sha256=Kq4XnWFRnvRbuMyXv1ZbGIyC7RHCInJZz0wgE0GFzYU,15593 +discord/webhook.py,sha256=1G1GxEpY_3vV6bkM04N0S1YoPQeeCVPm_Khl5R34rlg,26153 +discord/widget.py,sha256=S1EsaLmLNtwbcK5sQBZkKo1-_jaUmcw6FcKZpCI6Zo0,8022 diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/WHEEL b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/WHEEL new file mode 100644 index 0000000..d4fe38c --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/authors.sh b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/authors.sh new file mode 100644 index 0000000..e27c182 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/authors.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +# save as i.e.: git-authors and set the executable flag +git ls-tree -r -z --name-only HEAD -- $1 | xargs -0 -n1 git blame -w \ + --line-porcelain HEAD |grep "^author "|sort|uniq -c|sort -nr diff --git a/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/top_level.txt new file mode 100644 index 0000000..e46fba2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord.py-1.2.5.dist-info/top_level.txt @@ -0,0 +1 @@ +discord diff --git a/venv/lib/python3.7/site-packages/discord/__init__.py b/venv/lib/python3.7/site-packages/discord/__init__.py new file mode 100644 index 0000000..69def1a --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/__init__.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- + +""" +Discord API Wrapper +~~~~~~~~~~~~~~~~~~~ + +A basic wrapper for the Discord API. + +:copyright: (c) 2015-2019 Rapptz +:license: MIT, see LICENSE for more details. 
+ +""" + +__title__ = 'discord' +__author__ = 'Rapptz' +__license__ = 'MIT' +__copyright__ = 'Copyright 2015-2019 Rapptz' +__version__ = '1.2.5' + +from collections import namedtuple +import logging + +from .client import Client +from .appinfo import AppInfo +from .user import User, ClientUser, Profile +from .emoji import Emoji, PartialEmoji +from .activity import * +from .channel import * +from .guild import Guild, SystemChannelFlags +from .relationship import Relationship +from .member import Member, VoiceState +from .message import Message, Attachment +from .asset import Asset +from .errors import * +from .calls import CallMessage, GroupCall +from .permissions import Permissions, PermissionOverwrite +from .role import Role +from .file import File +from .colour import Color, Colour +from .invite import Invite, PartialInviteChannel, PartialInviteGuild +from .widget import Widget, WidgetMember, WidgetChannel +from .object import Object +from .reaction import Reaction +from . import utils, opus, abc +from .enums import * +from .embeds import Embed +from .shard import AutoShardedClient +from .player import * +from .webhook import * +from .voice_client import VoiceClient +from .audit_logs import AuditLogChanges, AuditLogEntry, AuditLogDiff +from .raw_models import * + +VersionInfo = namedtuple('VersionInfo', 'major minor micro releaselevel serial') + +version_info = VersionInfo(major=1, minor=2, micro=5, releaselevel='final', serial=0) + +try: + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logging.getLogger(__name__).addHandler(NullHandler()) diff --git a/venv/lib/python3.7/site-packages/discord/__main__.py b/venv/lib/python3.7/site-packages/discord/__main__.py new file mode 100644 index 0000000..3831938 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/__main__.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +import argparse +import sys +from pathlib import Path + +import discord +import pkg_resources +import aiohttp +import websockets +import platform + +def show_version(): + entries = [] + + entries.append('- Python v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(sys.version_info)) + version_info = discord.version_info + entries.append('- discord.py v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(version_info)) + if version_info.releaselevel != 'final': + pkg = pkg_resources.get_distribution('discord.py') + if pkg: + entries.append(' - discord.py pkg_resources: v{0}'.format(pkg.version)) + + entries.append('- aiohttp v{0.__version__}'.format(aiohttp)) + entries.append('- websockets v{0.__version__}'.format(websockets)) + uname = platform.uname() + entries.append('- system info: {0.system} {0.release} {0.version}'.format(uname)) + print('\n'.join(entries)) + +def core(parser, args): + if args.version: + show_version() + +bot_template = """#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from discord.ext import commands +import discord +import config + +class Bot(commands.{base}): + def __init__(self, **kwargs): + super().__init__(command_prefix=commands.when_mentioned_or('{prefix}'), **kwargs) + for cog in config.cogs: + try: + self.load_extension(cog) + except Exception as exc: + print('Could not load extension {{0}} due to {{1.__class__.__name__}}: {{1}}'.format(cog, exc)) + + async def on_ready(self): + print('Logged on as {{0}} (ID: {{0.id}})'.format(self.user)) + + +bot = Bot() + +# write general commands here + +bot.run(config.token) +""" + +gitignore_template = """# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Our configuration files +config.py +""" + +cog_template = '''# -*- coding: utf-8 -*- + +from discord.ext import commands +import discord + +class {name}(commands.Cog{attrs}): + """The description for {name} goes here.""" + + def __init__(self, bot): + self.bot = bot +{extra} +def setup(bot): + bot.add_cog({name}(bot)) +''' + +cog_extras = ''' + def cog_unload(self): + # clean up logic goes here + pass + + async def cog_check(self, ctx): + # checks that apply to every command in here + return True + + async def bot_check(self, ctx): + # checks that apply to every command to the bot + return True + + async def bot_check_once(self, ctx): + # check that apply to every command but is guaranteed to be called only once + return True + + async def cog_command_error(self, ctx, error): + # error handling to every command in here + pass + + async def cog_before_invoke(self, ctx): + # called before a command is called here + pass + + async def cog_after_invoke(self, ctx): + # called after a command is called here + pass + +''' + + +# certain file names and directory names are forbidden +# see: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx +# although some of this doesn't apply to Linux, we might as well be consistent +_base_table = { + '<': '-', + '>': '-', + ':': '-', + '"': '-', + # '/': '-', these are fine + # '\\': '-', + '|': '-', + '?': '-', + '*': '-', +} + +# NUL (0) and 1-31 are disallowed +_base_table.update((chr(i), None) for i in range(32)) + +translation_table = str.maketrans(_base_table) + +def to_path(parser, name, *, replace_spaces=False): + if isinstance(name, Path): + return name + + 
if sys.platform == 'win32': + forbidden = ('CON', 'PRN', 'AUX', 'NUL', 'COM1', 'COM2', 'COM3', 'COM4', 'COM5', 'COM6', 'COM7', \ + 'COM8', 'COM9', 'LPT1', 'LPT2', 'LPT3', 'LPT4', 'LPT5', 'LPT6', 'LPT7', 'LPT8', 'LPT9') + if len(name) <= 4 and name.upper() in forbidden: + parser.error('invalid directory name given, use a different one') + + name = name.translate(translation_table) + if replace_spaces: + name = name.replace(' ', '-') + return Path(name) + +def newbot(parser, args): + new_directory = to_path(parser, args.directory) / to_path(parser, args.name) + + # as a note exist_ok for Path is a 3.5+ only feature + # since we already checked above that we're >3.5 + try: + new_directory.mkdir(exist_ok=True, parents=True) + except OSError as exc: + parser.error('could not create our bot directory ({})'.format(exc)) + + cogs = new_directory / 'cogs' + + try: + cogs.mkdir(exist_ok=True) + init = cogs / '__init__.py' + init.touch() + except OSError as exc: + print('warning: could not create cogs directory ({})'.format(exc)) + + try: + with open(str(new_directory / 'config.py'), 'w', encoding='utf-8') as fp: + fp.write('token = "place your token here"\ncogs = []\n') + except OSError as exc: + parser.error('could not create config file ({})'.format(exc)) + + try: + with open(str(new_directory / 'bot.py'), 'w', encoding='utf-8') as fp: + base = 'Bot' if not args.sharded else 'AutoShardedBot' + fp.write(bot_template.format(base=base, prefix=args.prefix)) + except OSError as exc: + parser.error('could not create bot file ({})'.format(exc)) + + if not args.no_git: + try: + with open(str(new_directory / '.gitignore'), 'w', encoding='utf-8') as fp: + fp.write(gitignore_template) + except OSError as exc: + print('warning: could not create .gitignore file ({})'.format(exc)) + + print('successfully made bot at', new_directory) + +def newcog(parser, args): + cog_dir = to_path(parser, args.directory) + try: + cog_dir.mkdir(exist_ok=True) + except OSError as exc: + print('warning: could not create cogs directory ({})'.format(exc)) + + directory = cog_dir / to_path(parser, args.name) + directory = directory.with_suffix('.py') + try: + with open(str(directory), 'w', encoding='utf-8') as fp: + attrs = '' + extra = cog_extras if args.full else '' + if args.class_name: + name = args.class_name + else: + name = str(directory.stem) + if '-' in name: + name = name.replace('-', ' ').title().replace(' ', '') + else: + name = name.title() + + if args.display_name: + attrs += ', name="{}"'.format(args.display_name) + if args.hide_commands: + attrs += ', command_attrs=dict(hidden=True)' + fp.write(cog_template.format(name=name, extra=extra, attrs=attrs)) + except OSError as exc: + parser.error('could not create cog file ({})'.format(exc)) + else: + print('successfully made cog at', directory) + +def add_newbot_args(subparser): + parser = subparser.add_parser('newbot', help='creates a command bot project quickly') + parser.set_defaults(func=newbot) + + parser.add_argument('name', help='the bot project name') + parser.add_argument('directory', help='the directory to place it in (default: .)', nargs='?', default=Path.cwd()) + parser.add_argument('--prefix', help='the bot prefix (default: $)', default='$', metavar='') + parser.add_argument('--sharded', help='whether to use AutoShardedBot', action='store_true') + parser.add_argument('--no-git', help='do not create a .gitignore file', action='store_true', dest='no_git') + +def add_newcog_args(subparser): + parser = subparser.add_parser('newcog', help='creates a new cog 
template quickly') + parser.set_defaults(func=newcog) + + parser.add_argument('name', help='the cog name') + parser.add_argument('directory', help='the directory to place it in (default: cogs)', nargs='?', default=Path('cogs')) + parser.add_argument('--class-name', help='the class name of the cog (default: )', dest='class_name') + parser.add_argument('--display-name', help='the cog name (default: )') + parser.add_argument('--hide-commands', help='whether to hide all commands in the cog', action='store_true') + parser.add_argument('--full', help='add all special methods as well', action='store_true') + +def parse_args(): + parser = argparse.ArgumentParser(prog='discord', description='Tools for helping with discord.py') + parser.add_argument('-v', '--version', action='store_true', help='shows the library version') + parser.set_defaults(func=core) + + subparser = parser.add_subparsers(dest='subcommand', title='subcommands') + add_newbot_args(subparser) + add_newcog_args(subparser) + return parser, parser.parse_args() + +def main(): + parser, args = parse_args() + args.func(parser, args) + +main() diff --git a/venv/lib/python3.7/site-packages/discord/abc.py b/venv/lib/python3.7/site-packages/discord/abc.py new file mode 100644 index 0000000..1d490cd --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/abc.py @@ -0,0 +1,1042 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import abc +import copy +import asyncio +from collections import namedtuple + +from .iterators import HistoryIterator +from .context_managers import Typing +from .errors import InvalidArgument, ClientException, HTTPException +from .permissions import PermissionOverwrite, Permissions +from .role import Role +from .invite import Invite +from .file import File +from .voice_client import VoiceClient +from . import utils + +class _Undefined: + def __repr__(self): + return 'see-below' + +_undefined = _Undefined() + +class Snowflake(metaclass=abc.ABCMeta): + """An ABC that details the common operations on a Discord model. + + Almost all :ref:`Discord models ` meet this + abstract base class. + + Attributes + ----------- + id: :class:`int` + The model's unique ID. 
+ """ + __slots__ = () + + @property + @abc.abstractmethod + def created_at(self): + """:class:`datetime.datetime`: Returns the model's creation time as a naive datetime in UTC.""" + raise NotImplementedError + + @classmethod + def __subclasshook__(cls, C): + if cls is Snowflake: + mro = C.__mro__ + for attr in ('created_at', 'id'): + for base in mro: + if attr in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + +class User(metaclass=abc.ABCMeta): + """An ABC that details the common operations on a Discord user. + + The following implement this ABC: + + - :class:`~discord.User` + - :class:`~discord.ClientUser` + - :class:`~discord.Member` + + This ABC must also implement :class:`~discord.abc.Snowflake`. + + Attributes + ----------- + name: :class:`str` + The user's username. + discriminator: :class:`str` + The user's discriminator. + avatar: Optional[:class:`str`] + The avatar hash the user has. + bot: :class:`bool` + If the user is a bot account. + """ + __slots__ = () + + @property + @abc.abstractmethod + def display_name(self): + """:class:`str`: Returns the user's display name.""" + raise NotImplementedError + + @property + @abc.abstractmethod + def mention(self): + """:class:`str`: Returns a string that allows you to mention the given user.""" + raise NotImplementedError + + @classmethod + def __subclasshook__(cls, C): + if cls is User: + if Snowflake.__subclasshook__(C) is NotImplemented: + return NotImplemented + + mro = C.__mro__ + for attr in ('display_name', 'mention', 'name', 'avatar', 'discriminator', 'bot'): + for base in mro: + if attr in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + +class PrivateChannel(metaclass=abc.ABCMeta): + """An ABC that details the common operations on a private Discord channel. + + The following implement this ABC: + + - :class:`~discord.DMChannel` + - :class:`~discord.GroupChannel` + + This ABC must also implement :class:`~discord.abc.Snowflake`. + + Attributes + ----------- + me: :class:`~discord.ClientUser` + The user presenting yourself. + """ + __slots__ = () + + @classmethod + def __subclasshook__(cls, C): + if cls is PrivateChannel: + if Snowflake.__subclasshook__(C) is NotImplemented: + return NotImplemented + + mro = C.__mro__ + for base in mro: + if 'me' in base.__dict__: + return True + return NotImplemented + return NotImplemented + +_Overwrites = namedtuple('_Overwrites', 'id allow deny type') + +class GuildChannel: + """An ABC that details the common operations on a Discord guild channel. + + The following implement this ABC: + + - :class:`~discord.TextChannel` + - :class:`~discord.VoiceChannel` + - :class:`~discord.CategoryChannel` + + This ABC must also implement :class:`~discord.abc.Snowflake`. + + Attributes + ----------- + name: :class:`str` + The channel name. + guild: :class:`~discord.Guild` + The guild the channel belongs to. + position: :class:`int` + The position in the channel list. This is a number that starts at 0. + e.g. the top channel is position 0. 
+ """ + __slots__ = () + + def __str__(self): + return self.name + + @property + def _sorting_bucket(self): + raise NotImplementedError + + async def _move(self, position, parent_id=None, lock_permissions=False, *, reason): + if position < 0: + raise InvalidArgument('Channel position cannot be less than 0.') + + http = self._state.http + bucket = self._sorting_bucket + channels = [c for c in self.guild.channels if c._sorting_bucket == bucket] + + if position >= len(channels): + raise InvalidArgument('Channel position cannot be greater than {}'.format(len(channels) - 1)) + + channels.sort(key=lambda c: c.position) + + try: + # remove ourselves from the channel list + channels.remove(self) + except ValueError: + # not there somehow lol + return + else: + # add ourselves at our designated position + channels.insert(position, self) + + payload = [] + for index, c in enumerate(channels): + d = {'id': c.id, 'position': index} + if parent_id is not _undefined and c.id == self.id: + d.update(parent_id=parent_id, lock_permissions=lock_permissions) + payload.append(d) + + await http.bulk_channel_update(self.guild.id, payload, reason=reason) + self.position = position + if parent_id is not _undefined: + self.category_id = int(parent_id) if parent_id else None + + async def _edit(self, options, reason): + try: + parent = options.pop('category') + except KeyError: + parent_id = _undefined + else: + parent_id = parent and parent.id + + try: + options['rate_limit_per_user'] = options.pop('slowmode_delay') + except KeyError: + pass + + lock_permissions = options.pop('sync_permissions', False) + + try: + position = options.pop('position') + except KeyError: + if parent_id is not _undefined: + if lock_permissions: + category = self.guild.get_channel(parent_id) + options['permission_overwrites'] = [c._asdict() for c in category._overwrites] + options['parent_id'] = parent_id + elif lock_permissions and self.category_id is not None: + # if we're syncing permissions on a pre-existing channel category without changing it + # we need to update the permissions to point to the pre-existing category + category = self.guild.get_channel(self.category_id) + options['permission_overwrites'] = [c._asdict() for c in category._overwrites] + else: + await self._move(position, parent_id=parent_id, lock_permissions=lock_permissions, reason=reason) + + if options: + data = await self._state.http.edit_channel(self.id, reason=reason, **options) + self._update(self.guild, data) + + def _fill_overwrites(self, data): + self._overwrites = [] + everyone_index = 0 + everyone_id = self.guild.id + + for index, overridden in enumerate(data.get('permission_overwrites', [])): + overridden_id = int(overridden.pop('id')) + self._overwrites.append(_Overwrites(id=overridden_id, **overridden)) + + if overridden['type'] == 'member': + continue + + if overridden_id == everyone_id: + # the @everyone role is not guaranteed to be the first one + # in the list of permission overwrites, however the permission + # resolution code kind of requires that it is the first one in + # the list since it is special. So we need the index so we can + # swap it to be the first one. 
+ everyone_index = index + + # do the swap + tmp = self._overwrites + if tmp: + tmp[everyone_index], tmp[0] = tmp[0], tmp[everyone_index] + + @property + def changed_roles(self): + """List[:class:`~discord.Role`]: Returns a list of roles that have been overridden from + their default values in the :attr:`~discord.Guild.roles` attribute.""" + ret = [] + g = self.guild + for overwrite in filter(lambda o: o.type == 'role', self._overwrites): + role = g.get_role(overwrite.id) + if role is None: + continue + + role = copy.copy(role) + role.permissions.handle_overwrite(overwrite.allow, overwrite.deny) + ret.append(role) + return ret + + @property + def mention(self): + """:class:`str`: The string that allows you to mention the channel.""" + return '<#%s>' % self.id + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def overwrites_for(self, obj): + """Returns the channel-specific overwrites for a member or a role. + + Parameters + ----------- + obj: Union[:class:`~discord.Role`, :class:`~discord.abc.User`] + The role or user denoting + whose overwrite to get. + + Returns + --------- + :class:`~discord.PermissionOverwrite` + The permission overwrites for this object. + """ + + if isinstance(obj, User): + predicate = lambda p: p.type == 'member' + elif isinstance(obj, Role): + predicate = lambda p: p.type == 'role' + else: + predicate = lambda p: True + + for overwrite in filter(predicate, self._overwrites): + if overwrite.id == obj.id: + allow = Permissions(overwrite.allow) + deny = Permissions(overwrite.deny) + return PermissionOverwrite.from_pair(allow, deny) + + return PermissionOverwrite() + + @property + def overwrites(self): + """Returns all of the channel's overwrites. + + This is returned as a dictionary where the key contains the target which + can be either a :class:`~discord.Role` or a :class:`~discord.Member` and the key is the + overwrite as a :class:`~discord.PermissionOverwrite`. + + Returns + -------- + Mapping[Union[:class:`~discord.Role`, :class:`~discord.Member`], :class:`~discord.PermissionOverwrite`] + The channel's permission overwrites. + """ + ret = {} + for ow in self._overwrites: + allow = Permissions(ow.allow) + deny = Permissions(ow.deny) + overwrite = PermissionOverwrite.from_pair(allow, deny) + + if ow.type == 'role': + target = self.guild.get_role(ow.id) + elif ow.type == 'member': + target = self.guild.get_member(ow.id) + + # TODO: There is potential data loss here in the non-chunked + # case, i.e. target is None because get_member returned nothing. + # This can be fixed with a slight breaking change to the return type, + # i.e. adding discord.Object to the list of it + # However, for now this is an acceptable compromise. + if target is not None: + ret[target] = overwrite + return ret + + @property + def category(self): + """Optional[:class:`~discord.CategoryChannel`]: The category this channel belongs to. + + If there is no category then this is ``None``. + """ + return self.guild.get_channel(self.category_id) + + def permissions_for(self, member): + """Handles permission resolution for the current :class:`~discord.Member`. + + This function takes into consideration the following cases: + + - Guild owner + - Guild roles + - Channel overrides + - Member overrides + + Parameters + ---------- + member: :class:`~discord.Member` + The member to resolve permissions for. 
+ + Returns + ------- + :class:`~discord.Permissions` + The resolved permissions for the member. + """ + + # The current cases can be explained as: + # Guild owner get all permissions -- no questions asked. Otherwise... + # The @everyone role gets the first application. + # After that, the applied roles that the user has in the channel + # (or otherwise) are then OR'd together. + # After the role permissions are resolved, the member permissions + # have to take into effect. + # After all that is done.. you have to do the following: + + # If manage permissions is True, then all permissions are set to True. + + # The operation first takes into consideration the denied + # and then the allowed. + + o = self.guild.owner + if o is not None and member.id == o.id: + return Permissions.all() + + default = self.guild.default_role + base = Permissions(default.permissions.value) + roles = member.roles + + # Apply guild roles that the member has. + for role in roles: + base.value |= role.permissions.value + + # Guild-wide Administrator -> True for everything + # Bypass all channel-specific overrides + if base.administrator: + return Permissions.all() + + # Apply @everyone allow/deny first since it's special + try: + maybe_everyone = self._overwrites[0] + if maybe_everyone.id == self.guild.id: + base.handle_overwrite(allow=maybe_everyone.allow, deny=maybe_everyone.deny) + remaining_overwrites = self._overwrites[1:] + else: + remaining_overwrites = self._overwrites + except IndexError: + remaining_overwrites = self._overwrites + + # not sure if doing member._roles.get(...) is better than the + # set approach. While this is O(N) to re-create into a set for O(1) + # the direct approach would just be O(log n) for searching with no + # extra memory overhead. For now, I'll keep the set cast + # Note that the member.roles accessor up top also creates a + # temporary list + member_role_ids = {r.id for r in roles} + denies = 0 + allows = 0 + + # Apply channel specific role permission overwrites + for overwrite in remaining_overwrites: + if overwrite.type == 'role' and overwrite.id in member_role_ids: + denies |= overwrite.deny + allows |= overwrite.allow + + base.handle_overwrite(allow=allows, deny=denies) + + # Apply member specific permission overwrites + for overwrite in remaining_overwrites: + if overwrite.type == 'member' and overwrite.id == member.id: + base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny) + break + + # if you can't send a message in a channel then you can't have certain + # permissions as well + if not base.send_messages: + base.send_tts_messages = False + base.mention_everyone = False + base.embed_links = False + base.attach_files = False + + # if you can't read a channel then you have no permissions there + if not base.read_messages: + denied = Permissions.all_channel() + base.value &= ~denied.value + + return base + + async def delete(self, *, reason=None): + """|coro| + + Deletes the channel. + + You must have :attr:`~.Permissions.manage_channels` permission to use this. + + Parameters + ----------- + reason: Optional[:class:`str`] + The reason for deleting this channel. + Shows up on the audit log. + + Raises + ------- + :exc:`~discord.Forbidden` + You do not have proper permissions to delete the channel. + :exc:`~discord.NotFound` + The channel was not found or was already deleted. + :exc:`~discord.HTTPException` + Deleting the channel failed. 
+ """ + await self._state.http.delete_channel(self.id, reason=reason) + + async def set_permissions(self, target, *, overwrite=_undefined, reason=None, **permissions): + r"""|coro| + + Sets the channel specific permission overwrites for a target in the + channel. + + The ``target`` parameter should either be a :class:`~discord.Member` or a + :class:`~discord.Role` that belongs to guild. + + The ``overwrite`` parameter, if given, must either be ``None`` or + :class:`~discord.PermissionOverwrite`. For convenience, you can pass in + keyword arguments denoting :class:`~discord.Permissions` attributes. If this is + done, then you cannot mix the keyword arguments with the ``overwrite`` + parameter. + + If the ``overwrite`` parameter is ``None``, then the permission + overwrites are deleted. + + You must have the :attr:`~.Permissions.manage_roles` permission to use this. + + Examples + ---------- + + Setting allow and deny: :: + + await message.channel.set_permissions(message.author, read_messages=True, + send_messages=False) + + Deleting overwrites :: + + await channel.set_permissions(member, overwrite=None) + + Using :class:`~discord.PermissionOverwrite` :: + + overwrite = discord.PermissionOverwrite() + overwrite.send_messages = False + overwrite.read_messages = True + await channel.set_permissions(member, overwrite=overwrite) + + Parameters + ----------- + target: Union[:class:`~discord.Member`, :class:`~discord.Role`] + The member or role to overwrite permissions for. + overwrite: :class:`~discord.PermissionOverwrite` + The permissions to allow and deny to the target. + \*\*permissions + A keyword argument list of permissions to set for ease of use. + Cannot be mixed with ``overwrite``. + reason: Optional[:class:`str`] + The reason for doing this action. Shows up on the audit log. + + Raises + ------- + :exc:`~discord.Forbidden` + You do not have permissions to edit channel specific permissions. + :exc:`~discord.HTTPException` + Editing channel specific permissions failed. + :exc:`~discord.NotFound` + The role or member being edited is not part of the guild. + :exc:`~discord.InvalidArgument` + The overwrite parameter invalid or the target type was not + :class:`~discord.Role` or :class:`~discord.Member`. 
+ """ + + http = self._state.http + + if isinstance(target, User): + perm_type = 'member' + elif isinstance(target, Role): + perm_type = 'role' + else: + raise InvalidArgument('target parameter must be either Member or Role') + + if isinstance(overwrite, _Undefined): + if len(permissions) == 0: + raise InvalidArgument('No overwrite provided.') + try: + overwrite = PermissionOverwrite(**permissions) + except (ValueError, TypeError): + raise InvalidArgument('Invalid permissions given to keyword arguments.') + else: + if len(permissions) > 0: + raise InvalidArgument('Cannot mix overwrite and keyword arguments.') + + # TODO: wait for event + + if overwrite is None: + await http.delete_channel_permissions(self.id, target.id, reason=reason) + elif isinstance(overwrite, PermissionOverwrite): + (allow, deny) = overwrite.pair() + await http.edit_channel_permissions(self.id, target.id, allow.value, deny.value, perm_type, reason=reason) + else: + raise InvalidArgument('Invalid overwrite type provided.') + + async def _clone_impl(self, base_attrs, *, name=None, reason=None): + base_attrs['permission_overwrites'] = [ + x._asdict() for x in self._overwrites + ] + base_attrs['parent_id'] = self.category_id + base_attrs['name'] = name or self.name + guild_id = self.guild.id + cls = self.__class__ + data = await self._state.http.create_channel(guild_id, self.type.value, reason=reason, **base_attrs) + obj = cls(state=self._state, guild=self.guild, data=data) + + # temporarily add it to the cache + self.guild._channels[obj.id] = obj + return obj + + async def clone(self, *, name=None, reason=None): + """|coro| + + Clones this channel. This creates a channel with the same properties + as this channel. + + .. versionadded:: 1.1.0 + + Parameters + ------------ + name: Optional[:class:`str`] + The name of the new channel. If not provided, defaults to this + channel name. + reason: Optional[:class:`str`] + The reason for cloning this channel. Shows up on the audit log. + + Raises + ------- + :exc:`~discord.Forbidden` + You do not have the proper permissions to create this channel. + :exc:`~discord.HTTPException` + Creating the channel failed. + """ + raise NotImplementedError + + async def create_invite(self, *, reason=None, **fields): + """|coro| + + Creates an instant invite. + + You must have the :attr:`~.Permissions.create_instant_invite` permission to + do this. + + Parameters + ------------ + max_age: :class:`int` + How long the invite should last. If it's 0 then the invite + doesn't expire. Defaults to 0. + max_uses: :class:`int` + How many uses the invite could be used for. If it's 0 then there + are unlimited uses. Defaults to 0. + temporary: :class:`bool` + Denotes that the invite grants temporary membership + (i.e. they get kicked after they disconnect). Defaults to ``False``. + unique: :class:`bool` + Indicates if a unique invite URL should be created. Defaults to True. + If this is set to ``False`` then it will return a previously created + invite. + reason: Optional[:class:`str`] + The reason for creating this invite. Shows up on the audit log. + + Raises + ------- + :exc:`~discord.HTTPException` + Invite creation failed. + + Returns + -------- + :class:`~discord.Invite` + The invite that was created. + """ + + data = await self._state.http.create_invite(self.id, reason=reason, **fields) + return Invite.from_incomplete(data=data, state=self._state) + + async def invites(self): + """|coro| + + Returns a list of all active instant invites from this channel. 
+ + You must have :attr:`~.Permissions.manage_guild` to get this information. + + Raises + ------- + :exc:`~discord.Forbidden` + You do not have proper permissions to get the information. + :exc:`~discord.HTTPException` + An error occurred while fetching the information. + + Returns + ------- + List[:class:`~discord.Invite`] + The list of invites that are currently active. + """ + + state = self._state + data = await state.http.invites_from_channel(self.id) + result = [] + + for invite in data: + invite['channel'] = self + invite['guild'] = self.guild + result.append(Invite(state=state, data=invite)) + + return result + +class Messageable(metaclass=abc.ABCMeta): + """An ABC that details the common operations on a model that can send messages. + + The following implement this ABC: + + - :class:`~discord.TextChannel` + - :class:`~discord.DMChannel` + - :class:`~discord.GroupChannel` + - :class:`~discord.User` + - :class:`~discord.Member` + - :class:`~discord.ext.commands.Context` + + This ABC must also implement :class:`~discord.abc.Snowflake`. + """ + + __slots__ = () + + @abc.abstractmethod + async def _get_channel(self): + raise NotImplementedError + + async def send(self, content=None, *, tts=False, embed=None, file=None, files=None, delete_after=None, nonce=None): + """|coro| + + Sends a message to the destination with the content given. + + The content must be a type that can convert to a string through ``str(content)``. + If the content is set to ``None`` (the default), then the ``embed`` parameter must + be provided. + + To upload a single file, the ``file`` parameter should be used with a + single :class:`~discord.File` object. To upload multiple files, the ``files`` + parameter should be used with a :class:`list` of :class:`~discord.File` objects. + **Specifying both parameters will lead to an exception**. + + If the ``embed`` parameter is provided, it must be of type :class:`~discord.Embed` and + it must be a rich embed type. + + Parameters + ------------ + content: :class:`str` + The content of the message to send. + tts: :class:`bool` + Indicates if the message should be sent using text-to-speech. + embed: :class:`~discord.Embed` + The rich embed for the content. + file: :class:`~discord.File` + The file to upload. + files: List[:class:`~discord.File`] + A list of files to upload. Must be a maximum of 10. + nonce: :class:`int` + The nonce to use for sending this message. If the message was successfully sent, + then the message will have a nonce with this value. + delete_after: :class:`float` + If provided, the number of seconds to wait in the background + before deleting the message we just sent. If the deletion fails, + then it is silently ignored. + + Raises + -------- + :exc:`~discord.HTTPException` + Sending the message failed. + :exc:`~discord.Forbidden` + You do not have the proper permissions to send the message. + :exc:`~discord.InvalidArgument` + The ``files`` list is not of the appropriate size or + you specified both ``file`` and ``files``. + + Returns + --------- + :class:`~discord.Message` + The message that was sent. 
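A short sketch of how `Messageable.send` is typically driven, under the rules spelled out in the docstring above (``file`` and ``files`` are mutually exclusive, ``delete_after`` cleans up in the background). The `report` helper name, the embed text, and the `report.txt` path are placeholders.

    import discord

    async def report(channel: discord.abc.Messageable):
        embed = discord.Embed(title="Daily report", description="All systems nominal")
        # A single attachment goes through `file`; multiple attachments would use `files`.
        await channel.send(content="Report attached", embed=embed,
                           file=discord.File("report.txt"))  # placeholder path
        # A throwaway notice that removes itself after ten seconds.
        await channel.send("This self-destructs.", delete_after=10)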
+ """ + + channel = await self._get_channel() + state = self._state + content = str(content) if content is not None else None + if embed is not None: + embed = embed.to_dict() + + if file is not None and files is not None: + raise InvalidArgument('cannot pass both file and files parameter to send()') + + if file is not None: + if not isinstance(file, File): + raise InvalidArgument('file parameter must be File') + + try: + data = await state.http.send_files(channel.id, files=[file], + content=content, tts=tts, embed=embed, nonce=nonce) + finally: + file.close() + + elif files is not None: + if len(files) > 10: + raise InvalidArgument('files parameter must be a list of up to 10 elements') + elif not all(isinstance(file, File) for file in files): + raise InvalidArgument('files parameter must be a list of File') + + try: + data = await state.http.send_files(channel.id, files=files, content=content, tts=tts, + embed=embed, nonce=nonce) + finally: + for f in files: + f.close() + else: + data = await state.http.send_message(channel.id, content, tts=tts, embed=embed, nonce=nonce) + + ret = state.create_message(channel=channel, data=data) + if delete_after is not None: + await ret.delete(delay=delete_after) + return ret + + async def trigger_typing(self): + """|coro| + + Triggers a *typing* indicator to the destination. + + *Typing* indicator will go away after 10 seconds, or after a message is sent. + """ + + channel = await self._get_channel() + await self._state.http.send_typing(channel.id) + + def typing(self): + """Returns a context manager that allows you to type for an indefinite period of time. + + This is useful for denoting long computations in your bot. + + .. note:: + + This is both a regular context manager and an async context manager. + This means that both ``with`` and ``async with`` work with this. + + Example Usage: :: + + async with channel.typing(): + # do expensive stuff here + await channel.send('done!') + + """ + return Typing(self) + + async def fetch_message(self, id): + """|coro| + + Retrieves a single :class:`~discord.Message` from the destination. + + This can only be used by bot accounts. + + Parameters + ------------ + id: :class:`int` + The message ID to look for. + + Raises + -------- + :exc:`~discord.NotFound` + The specified message was not found. + :exc:`~discord.Forbidden` + You do not have the permissions required to get a message. + :exc:`~discord.HTTPException` + Retrieving the message failed. + + Returns + -------- + :class:`~discord.Message` + The message asked for. + """ + + channel = await self._get_channel() + data = await self._state.http.get_message(channel.id, id) + return self._state.create_message(channel=channel, data=data) + + async def pins(self): + """|coro| + + Retrieves all messages that are currently pinned in the channel. + + .. note:: + + Due to a limitation with the Discord API, the :class:`.Message` + objects returned by this method do not contain complete + :attr:`.Message.reactions` data. + + Raises + ------- + :exc:`~discord.HTTPException` + Retrieving the pinned messages failed. + + Returns + -------- + List[:class:`~discord.Message`] + The messages that are currently pinned. 
+ """ + + channel = await self._get_channel() + state = self._state + data = await state.http.pins_from(channel.id) + return [state.create_message(channel=channel, data=m) for m in data] + + def history(self, *, limit=100, before=None, after=None, around=None, oldest_first=None): + """Returns an :class:`~discord.AsyncIterator` that enables receiving the destination's message history. + + You must have :attr:`~.Permissions.read_message_history` permissions to use this. + + Examples + --------- + + Usage :: + + counter = 0 + async for message in channel.history(limit=200): + if message.author == client.user: + counter += 1 + + Flattening into a list: :: + + messages = await channel.history(limit=123).flatten() + # messages is now a list of Message... + + All parameters are optional. + + Parameters + ----------- + limit: Optional[:class:`int`] + The number of messages to retrieve. + If ``None``, retrieves every message in the channel. Note, however, + that this would make it a slow operation. + before: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] + Retrieve messages before this date or message. + If a date is provided it must be a timezone-naive datetime representing UTC time. + after: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] + Retrieve messages after this date or message. + If a date is provided it must be a timezone-naive datetime representing UTC time. + around: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] + Retrieve messages around this date or message. + If a date is provided it must be a timezone-naive datetime representing UTC time. + When using this argument, the maximum limit is 101. Note that if the limit is an + even number then this will return at most limit + 1 messages. + oldest_first: Optional[:class:`bool`] + If set to ``True``, return messages in oldest->newest order. Defaults to ``True`` if + ``after`` is specified, otherwise ``False``. + + Raises + ------ + :exc:`~discord.Forbidden` + You do not have permissions to get channel message history. + :exc:`~discord.HTTPException` + The request to get message history failed. + + Yields + ------- + :class:`~discord.Message` + The message with the message data parsed. + """ + return HistoryIterator(self, limit=limit, before=before, after=after, around=around, oldest_first=oldest_first) + + +class Connectable(metaclass=abc.ABCMeta): + """An ABC that details the common operations on a channel that can + connect to a voice server. + + The following implement this ABC: + + - :class:`~discord.VoiceChannel` + """ + __slots__ = () + + @abc.abstractmethod + def _get_voice_client_key(self): + raise NotImplementedError + + @abc.abstractmethod + def _get_voice_state_pair(self): + raise NotImplementedError + + async def connect(self, *, timeout=60.0, reconnect=True): + """|coro| + + Connects to voice and creates a :class:`VoiceClient` to establish + your connection to the voice server. + + Parameters + ----------- + timeout: :class:`float` + The timeout in seconds to wait for the voice endpoint. + reconnect: :class:`bool` + Whether the bot should automatically attempt + a reconnect if a part of the handshake fails + or the gateway goes down. + + Raises + ------- + :exc:`asyncio.TimeoutError` + Could not connect to the voice channel in time. + :exc:`~discord.ClientException` + You are already connected to a voice channel. + :exc:`~discord.opus.OpusNotLoaded` + The opus library has not been loaded. 
+ + Returns + -------- + :class:`~discord.VoiceClient` + A voice client that is fully connected to the voice server. + """ + key_id, _ = self._get_voice_client_key() + state = self._state + + if state._get_voice_client(key_id): + raise ClientException('Already connected to a voice channel.') + + voice = VoiceClient(state=state, timeout=timeout, channel=self) + state._add_voice_client(key_id, voice) + + try: + await voice.connect(reconnect=reconnect) + except asyncio.TimeoutError: + try: + await voice.disconnect(force=True) + except Exception: + # we don't care if disconnect failed because connection failed + pass + raise # re-raise + + return voice diff --git a/venv/lib/python3.7/site-packages/discord/activity.py b/venv/lib/python3.7/site-packages/discord/activity.py new file mode 100644 index 0000000..3acc046 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/activity.py @@ -0,0 +1,600 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import datetime + +from .enums import ActivityType, try_enum +from .colour import Colour +from .utils import _get_as_snowflake + +__all__ = ( + 'Activity', + 'Streaming', + 'Game', + 'Spotify', +) + +"""If curious, this is the current schema for an activity. + +It's fairly long so I will document it here: + +All keys are optional. + +state: str (max: 128), +details: str (max: 128) +timestamps: dict + start: int (min: 1) + end: int (min: 1) +assets: dict + large_image: str (max: 32) + large_text: str (max: 128) + small_image: str (max: 32) + small_text: str (max: 128) +party: dict + id: str (max: 128), + size: List[int] (max-length: 2) + elem: int (min: 1) +secrets: dict + match: str (max: 128) + join: str (max: 128) + spectate: str (max: 128) +instance: bool +application_id: str +name: str (max: 128) +url: str +type: int +sync_id: str +session_id: str +flags: int + +There are also activity flags which are mostly uninteresting for the library atm. + +t.ActivityFlags = { + INSTANCE: 1, + JOIN: 2, + SPECTATE: 4, + JOIN_REQUEST: 8, + SYNC: 16, + PLAY: 32 +} +""" + +class _ActivityTag: + __slots__ = () + +class Activity(_ActivityTag): + """Represents an activity in Discord. + + This could be an activity such as streaming, playing, listening + or watching. + + For memory optimisation purposes, some activities are offered in slimmed + down versions: + + - :class:`Game` + - :class:`Streaming` + + Attributes + ------------ + application_id: :class:`int` + The application ID of the game. 
+ name: :class:`str` + The name of the activity. + url: :class:`str` + A stream URL that the activity could be doing. + type: :class:`ActivityType` + The type of activity currently being done. + state: :class:`str` + The user's current state. For example, "In Game". + details: :class:`str` + The detail of the user's current activity. + timestamps: :class:`dict` + A dictionary of timestamps. It contains the following optional keys: + + - ``start``: Corresponds to when the user started doing the + activity in milliseconds since Unix epoch. + - ``end``: Corresponds to when the user will finish doing the + activity in milliseconds since Unix epoch. + + assets: :class:`dict` + A dictionary representing the images and their hover text of an activity. + It contains the following optional keys: + + - ``large_image``: A string representing the ID for the large image asset. + - ``large_text``: A string representing the text when hovering over the large image asset. + - ``small_image``: A string representing the ID for the small image asset. + - ``small_text``: A string representing the text when hovering over the small image asset. + + party: :class:`dict` + A dictionary representing the activity party. It contains the following optional keys: + + - ``id``: A string representing the party ID. + - ``size``: A list of up to two integer elements denoting (current_size, maximum_size). + """ + + __slots__ = ('state', 'details', 'timestamps', 'assets', 'party', + 'flags', 'sync_id', 'session_id', 'type', 'name', 'url', 'application_id') + + def __init__(self, **kwargs): + self.state = kwargs.pop('state', None) + self.details = kwargs.pop('details', None) + self.timestamps = kwargs.pop('timestamps', {}) + self.assets = kwargs.pop('assets', {}) + self.party = kwargs.pop('party', {}) + self.application_id = _get_as_snowflake(kwargs, 'application_id') + self.name = kwargs.pop('name', None) + self.url = kwargs.pop('url', None) + self.flags = kwargs.pop('flags', 0) + self.sync_id = kwargs.pop('sync_id', None) + self.session_id = kwargs.pop('session_id', None) + self.type = try_enum(ActivityType, kwargs.pop('type', -1)) + + def __repr__(self): + attrs = ( + 'type', + 'name', + 'url', + 'details', + 'application_id', + 'session_id', + ) + mapped = ' '.join('%s=%r' % (attr, getattr(self, attr)) for attr in attrs) + return '' % mapped + + def to_dict(self): + ret = {} + for attr in self.__slots__: + value = getattr(self, attr, None) + if value is None: + continue + + if isinstance(value, dict) and len(value) == 0: + continue + + ret[attr] = value + ret['type'] = int(self.type) + return ret + + @property + def start(self): + """Optional[:class:`datetime.datetime`]: When the user started doing this activity in UTC, if applicable.""" + try: + return datetime.datetime.utcfromtimestamp(self.timestamps['start'] / 1000) + except KeyError: + return None + + @property + def end(self): + """Optional[:class:`datetime.datetime`]: When the user will stop doing this activity in UTC, if applicable.""" + try: + return datetime.datetime.utcfromtimestamp(self.timestamps['end'] / 1000) + except KeyError: + return None + + @property + def large_image_url(self): + """Optional[:class:`str`]: Returns a URL pointing to the large image asset of this activity if applicable.""" + if self.application_id is None: + return None + + try: + large_image = self.assets['large_image'] + except KeyError: + return None + else: + return 'https://cdn.discordapp.com/app-assets/{0}/{1}.png'.format(self.application_id, large_image) + + @property + def 
small_image_url(self): + """Optional[:class:`str`]: Returns a URL pointing to the small image asset of this activity if applicable.""" + if self.application_id is None: + return None + + try: + small_image = self.assets['small_image'] + except KeyError: + return None + else: + return 'https://cdn.discordapp.com/app-assets/{0}/{1}.png'.format(self.application_id, small_image) + @property + def large_image_text(self): + """Optional[:class:`str`]: Returns the large image asset hover text of this activity if applicable.""" + return self.assets.get('large_text', None) + + @property + def small_image_text(self): + """Optional[:class:`str`]: Returns the small image asset hover text of this activity if applicable.""" + return self.assets.get('small_text', None) + + +class Game(_ActivityTag): + """A slimmed down version of :class:`Activity` that represents a Discord game. + + This is typically displayed via **Playing** on the official Discord client. + + .. container:: operations + + .. describe:: x == y + + Checks if two games are equal. + + .. describe:: x != y + + Checks if two games are not equal. + + .. describe:: hash(x) + + Returns the game's hash. + + .. describe:: str(x) + + Returns the game's name. + + Parameters + ----------- + name: :class:`str` + The game's name. + start: Optional[:class:`datetime.datetime`] + A naive UTC timestamp representing when the game started. Keyword-only parameter. Ignored for bots. + end: Optional[:class:`datetime.datetime`] + A naive UTC timestamp representing when the game ends. Keyword-only parameter. Ignored for bots. + + Attributes + ----------- + name: :class:`str` + The game's name. + """ + + __slots__ = ('name', '_end', '_start') + + def __init__(self, name, **extra): + self.name = name + + try: + timestamps = extra['timestamps'] + except KeyError: + self._extract_timestamp(extra, 'start') + self._extract_timestamp(extra, 'end') + else: + self._start = timestamps.get('start', 0) + self._end = timestamps.get('end', 0) + + def _extract_timestamp(self, data, key): + try: + dt = data[key] + except KeyError: + setattr(self, '_' + key, 0) + else: + setattr(self, '_' + key, dt.timestamp() * 1000.0) + + @property + def type(self): + """Returns the game's type. This is for compatibility with :class:`Activity`. + + It always returns :attr:`ActivityType.playing`. + """ + return ActivityType.playing + + @property + def start(self): + """Optional[:class:`datetime.datetime`]: When the user started playing this game in UTC, if applicable.""" + if self._start: + return datetime.datetime.utcfromtimestamp(self._start / 1000) + return None + + @property + def end(self): + """Optional[:class:`datetime.datetime`]: When the user will stop playing this game in UTC, if applicable.""" + if self._end: + return datetime.datetime.utcfromtimestamp(self._end / 1000) + return None + + def __str__(self): + return str(self.name) + + def __repr__(self): + return ''.format(self) + + def to_dict(self): + timestamps = {} + if self._start: + timestamps['start'] = self._start + + if self._end: + timestamps['end'] = self._end + + return { + 'type': ActivityType.playing.value, + 'name': str(self.name), + 'timestamps': timestamps + } + + def __eq__(self, other): + return isinstance(other, Game) and other.name == self.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.name) + +class Streaming(_ActivityTag): + """A slimmed down version of :class:`Activity` that represents a Discord streaming status. 
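A sketch of how these slimmed-down activity classes are commonly handed to a client's presence; it assumes `client` is an already-connected `discord.Client`, and `set_presence` is a hypothetical helper.

    import discord

    async def set_presence(client: discord.Client):
        # Game renders as "Playing ..."; a generic Activity covers watching/listening.
        await client.change_presence(activity=discord.Game(name="with the gateway"))
        watching = discord.Activity(type=discord.ActivityType.watching,
                                    name="the audit log")
        await client.change_presence(activity=watching)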
+ + This is typically displayed via **Streaming** on the official Discord client. + + .. container:: operations + + .. describe:: x == y + + Checks if two streams are equal. + + .. describe:: x != y + + Checks if two streams are not equal. + + .. describe:: hash(x) + + Returns the stream's hash. + + .. describe:: str(x) + + Returns the stream's name. + + Attributes + ----------- + name: :class:`str` + The stream's name. + url: :class:`str` + The stream's URL. Currently only twitch.tv URLs are supported. Anything else is silently + discarded. + details: Optional[:class:`str`] + If provided, typically the game the streamer is playing. + assets: :class:`dict` + A dictionary comprising of similar keys than those in :attr:`Activity.assets`. + """ + + __slots__ = ('name', 'url', 'details', 'assets') + + def __init__(self, *, name, url, **extra): + self.name = name + self.url = url + self.details = extra.pop('details', None) + self.assets = extra.pop('assets', {}) + + @property + def type(self): + """Returns the game's type. This is for compatibility with :class:`Activity`. + + It always returns :attr:`ActivityType.streaming`. + """ + return ActivityType.streaming + + def __str__(self): + return str(self.name) + + def __repr__(self): + return ''.format(self) + + @property + def twitch_name(self): + """Optional[:class:`str`]: If provided, the twitch name of the user streaming. + + This corresponds to the ``large_image`` key of the :attr:`Streaming.assets` + dictionary if it starts with ``twitch:``. Typically set by the Discord client. + """ + + try: + name = self.assets['large_image'] + except KeyError: + return None + else: + return name[7:] if name[:7] == 'twitch:' else None + + def to_dict(self): + ret = { + 'type': ActivityType.streaming.value, + 'name': str(self.name), + 'url': str(self.url), + 'assets': self.assets + } + if self.details: + ret['details'] = self.details + return ret + + def __eq__(self, other): + return isinstance(other, Streaming) and other.name == self.name and other.url == self.url + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.name) + +class Spotify: + """Represents a Spotify listening activity from Discord. This is a special case of + :class:`Activity` that makes it easier to work with the Spotify integration. + + .. container:: operations + + .. describe:: x == y + + Checks if two activities are equal. + + .. describe:: x != y + + Checks if two activities are not equal. + + .. describe:: hash(x) + + Returns the activity's hash. + + .. describe:: str(x) + + Returns the string 'Spotify'. + """ + + __slots__ = ('_state', '_details', '_timestamps', '_assets', '_party', '_sync_id', '_session_id') + + def __init__(self, **data): + self._state = data.pop('state', None) + self._details = data.pop('details', None) + self._timestamps = data.pop('timestamps', {}) + self._assets = data.pop('assets', {}) + self._party = data.pop('party', {}) + self._sync_id = data.pop('sync_id') + self._session_id = data.pop('session_id') + + @property + def type(self): + """Returns the activity's type. This is for compatibility with :class:`Activity`. + + It always returns :attr:`ActivityType.listening`. + """ + return ActivityType.listening + + @property + def colour(self): + """Returns the Spotify integration colour, as a :class:`Colour`. + + There is an alias for this named :meth:`color`""" + return Colour(0x1db954) + + @property + def color(self): + """Returns the Spotify integration colour, as a :class:`Colour`. 
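An illustrative construction of a `Streaming` activity showing how `twitch_name` is derived from the ``large_image`` asset; the stream name, URL, and ``twitch:example`` value are placeholders.

    import discord

    stream = discord.Streaming(
        name="Speedrun practice",
        url="https://www.twitch.tv/example",       # placeholder channel URL
        assets={'large_image': 'twitch:example'},  # the client's Twitch asset tag
    )
    assert stream.type is discord.ActivityType.streaming
    assert stream.twitch_name == 'example'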
+ + There is an alias for this named :meth:`colour`""" + return self.colour + + def to_dict(self): + return { + 'flags': 48, # SYNC | PLAY + 'name': 'Spotify', + 'assets': self._assets, + 'party': self._party, + 'sync_id': self._sync_id, + 'session_id': self._session_id, + 'timestamps': self._timestamps, + 'details': self._details, + 'state': self._state + } + + @property + def name(self): + """:class:`str`: The activity's name. This will always return "Spotify".""" + return 'Spotify' + + def __eq__(self, other): + return (isinstance(other, Spotify) and other._session_id == self._session_id + and other._sync_id == self._sync_id and other.start == self.start) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self._session_id) + + def __str__(self): + return 'Spotify' + + def __repr__(self): + return ''.format(self) + + @property + def title(self): + """:class:`str`: The title of the song being played.""" + return self._details + + @property + def artists(self): + """List[:class:`str`]: The artists of the song being played.""" + return self._state.split('; ') + + @property + def artist(self): + """:class:`str`: The artist of the song being played. + + This does not attempt to split the artist information into + multiple artists. Useful if there's only a single artist. + """ + return self._state + + @property + def album(self): + """:class:`str`: The album that the song being played belongs to.""" + return self._assets.get('large_text', '') + + @property + def album_cover_url(self): + """:class:`str`: The album cover image URL from Spotify's CDN.""" + large_image = self._assets.get('large_image', '') + if large_image[:8] != 'spotify:': + return '' + album_image_id = large_image[8:] + return 'https://i.scdn.co/image/' + album_image_id + + @property + def track_id(self): + """:class:`str`: The track ID used by Spotify to identify this song.""" + return self._sync_id + + @property + def start(self): + """:class:`datetime.datetime`: When the user started playing this song in UTC.""" + return datetime.datetime.utcfromtimestamp(self._timestamps['start'] / 1000) + + @property + def end(self): + """:class:`datetime.datetime`: When the user will stop playing this song in UTC.""" + return datetime.datetime.utcfromtimestamp(self._timestamps['end'] / 1000) + + @property + def duration(self): + """:class:`datetime.timedelta`: The duration of the song being played.""" + return self.end - self.start + + @property + def party_id(self): + """:class:`str`: The party ID of the listening party.""" + return self._party.get('id', '') + +def create_activity(data): + if not data: + return None + + game_type = try_enum(ActivityType, data.get('type', -1)) + if game_type is ActivityType.playing: + if 'application_id' in data or 'session_id' in data: + return Activity(**data) + return Game(**data) + elif game_type is ActivityType.streaming: + if 'url' in data: + return Streaming(**data) + return Activity(**data) + elif game_type is ActivityType.listening and 'sync_id' in data and 'session_id' in data: + return Spotify(**data) + return Activity(**data) diff --git a/venv/lib/python3.7/site-packages/discord/appinfo.py b/venv/lib/python3.7/site-packages/discord/appinfo.py new file mode 100644 index 0000000..f38f40b --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/appinfo.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of 
this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .user import User +from .asset import Asset + + +class AppInfo: + """Represents the application info for the bot provided by Discord. + + + Attributes + ------------- + id: :class:`int` + The application ID. + name: :class:`str` + The application name. + owner: :class:`User` + The application owner. + icon: Optional[:class:`str`] + The icon hash, if it exists. + description: Optional[:class:`str`] + The application description. + bot_public: :class:`bool` + Whether the bot can be invited by anyone or if it is locked + to the application owner. + bot_require_code_grant: :class:`bool` + Whether the bot requires the completion of the full oauth2 code + grant flow to join. + rpc_origins: Optional[List[:class:`str`]] + A list of RPC origin URLs, if RPC is enabled. + """ + __slots__ = ('_state', 'description', 'id', 'name', 'rpc_origins', + 'bot_public', 'bot_require_code_grant', 'owner', 'icon') + + def __init__(self, state, data): + self._state = state + + self.id = int(data['id']) + self.name = data['name'] + self.description = data['description'] + self.icon = data['icon'] + self.rpc_origins = data['rpc_origins'] + self.bot_public = data['bot_public'] + self.bot_require_code_grant = data['bot_require_code_grant'] + self.owner = User(state=self._state, data=data['owner']) + + def __repr__(self): + return '<{0.__class__.__name__} id={0.id} name={0.name!r} description={0.description!r} public={0.bot_public} ' \ + 'owner={0.owner!r}>'.format(self) + + @property + def icon_url(self): + """:class:`.Asset`: Retrieves the application's icon asset.""" + return Asset._from_icon(self._state, self, 'app') diff --git a/venv/lib/python3.7/site-packages/discord/asset.py b/venv/lib/python3.7/site-packages/discord/asset.py new file mode 100644 index 0000000..af5a480 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/asset.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
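A hedged sketch of reading the `AppInfo` fields defined above; it assumes the usual entry point is `Client.application_info()` and that `client` is logged in, and `describe_app` is a hypothetical helper.

    import discord

    async def describe_app(client: discord.Client):
        info = await client.application_info()  # an AppInfo instance
        print(f"{info.name} (id={info.id}) owned by {info.owner}")
        if not info.bot_public:
            print("Only the owner can invite this bot.")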
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import io +from .errors import DiscordException +from .errors import InvalidArgument +from . import utils + +VALID_STATIC_FORMATS = frozenset({"jpeg", "jpg", "webp", "png"}) +VALID_AVATAR_FORMATS = VALID_STATIC_FORMATS | {"gif"} + +class Asset: + """Represents a CDN asset on Discord. + + .. container:: operations + + .. describe:: str(x) + + Returns the URL of the CDN asset. + + .. describe:: len(x) + + Returns the length of the CDN asset's URL. + + .. describe:: bool(x) + + Checks if the Asset has a URL. + + .. describe:: x == y + + Checks if the asset is equal to another asset. + + .. describe:: x != y + + Checks if the asset is not equal to another asset. + + .. describe:: hash(x) + + Returns the hash of the asset. + """ + __slots__ = ('_state', '_url') + + def __init__(self, state, url=None): + self._state = state + self._url = url + + @classmethod + def _from_avatar(cls, state, user, *, format=None, static_format='webp', size=1024): + if not utils.valid_icon_size(size): + raise InvalidArgument("size must be a power of 2 between 16 and 4096") + if format is not None and format not in VALID_AVATAR_FORMATS: + raise InvalidArgument("format must be None or one of {}".format(VALID_AVATAR_FORMATS)) + if format == "gif" and not user.is_avatar_animated(): + raise InvalidArgument("non animated avatars do not support gif format") + if static_format not in VALID_STATIC_FORMATS: + raise InvalidArgument("static_format must be one of {}".format(VALID_STATIC_FORMATS)) + + if user.avatar is None: + return user.default_avatar_url + + if format is None: + format = 'gif' if user.is_avatar_animated() else static_format + + return cls(state, 'https://cdn.discordapp.com/avatars/{0.id}/{0.avatar}.{1}?size={2}'.format(user, format, size)) + + @classmethod + def _from_icon(cls, state, object, path): + if object.icon is None: + return cls(state) + + url = 'https://cdn.discordapp.com/{0}-icons/{1.id}/{1.icon}.jpg'.format(path, object) + return cls(state, url) + + @classmethod + def _from_guild_image(cls, state, id, hash, key, *, format='webp', size=1024): + if not utils.valid_icon_size(size): + raise InvalidArgument("size must be a power of 2 between 16 and 4096") + if format not in VALID_STATIC_FORMATS: + raise InvalidArgument("format must be one of {}".format(VALID_STATIC_FORMATS)) + + if hash is None: + return cls(state) + + url = 'https://cdn.discordapp.com/{key}/{0}/{1}.{2}?size={3}' + return cls(state, url.format(id, hash, format, size, key=key)) + + @classmethod + def _from_guild_icon(cls, state, guild, *, format=None, static_format='webp', size=1024): + if not utils.valid_icon_size(size): + raise InvalidArgument("size must be a power of 2 between 16 and 4096") + if format is not None and format not in VALID_AVATAR_FORMATS: + raise InvalidArgument("format must be one of {}".format(VALID_AVATAR_FORMATS)) + if format == "gif" and not guild.is_icon_animated(): + raise InvalidArgument("non animated guild icons do not support gif format") + if static_format not in VALID_STATIC_FORMATS: + raise InvalidArgument("static_format must be one of 
{}".format(VALID_STATIC_FORMATS)) + + if guild.icon is None: + return cls(state) + + if format is None: + format = 'gif' if guild.is_icon_animated() else static_format + + return cls(state, 'https://cdn.discordapp.com/icons/{0.id}/{0.icon}.{1}?size={2}'.format(guild, format, size)) + + + def __str__(self): + return self._url if self._url is not None else '' + + def __len__(self): + if self._url: + return len(self._url) + return 0 + + def __bool__(self): + return self._url is not None + + def __repr__(self): + return ''.format(self) + + def __eq__(self, other): + return isinstance(other, Asset) and self._url == other._url + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self._url) + + async def read(self): + """|coro| + + Retrieves the content of this asset as a :class:`bytes` object. + + .. warning:: + + :class:`PartialEmoji` won't have a connection state if user created, + and a URL won't be present if a custom image isn't associated with + the asset, e.g. a guild with no custom icon. + + .. versionadded:: 1.1.0 + + Parameters + ----------- + fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`] + Same as in :meth:`Attachment.save`. + seek_begin: :class:`bool` + Same as in :meth:`Attachment.save`. + + Raises + ------ + DiscordException + There was no valid URL or internal connection state. + HTTPException + Downloading the asset failed. + NotFound + The asset was deleted. + + Returns + ------- + :class:`bytes` + The content of the asset. + """ + if not self._url: + raise DiscordException('Invalid asset (no URL provided)') + + if self._state is None: + raise DiscordException('Invalid state (no ConnectionState provided)') + + return await self._state.http.get_from_cdn(self._url) + + async def save(self, fp, *, seek_begin=True): + """|coro| + + Saves this asset into a file-like object. + + Parameters + ---------- + fp: Union[BinaryIO, :class:`os.PathLike`] + Same as in :meth:`Attachment.save`. + seek_begin: :class:`bool` + Same as in :meth:`Attachment.save`. + + Raises + ------ + Same as :meth:`read`. + + Returns + -------- + :class:`int` + The number of bytes written. + """ + + data = await self.read() + if isinstance(fp, io.IOBase) and fp.writable(): + written = fp.write(data) + if seek_begin: + fp.seek(0) + return written + else: + with open(fp, 'wb') as f: + return f.write(data) diff --git a/venv/lib/python3.7/site-packages/discord/audit_logs.py b/venv/lib/python3.7/site-packages/discord/audit_logs.py new file mode 100644 index 0000000..b997e91 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/audit_logs.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils, enums +from .object import Object +from .permissions import PermissionOverwrite, Permissions +from .colour import Colour +from .invite import Invite + +def _transform_verification_level(entry, data): + return enums.try_enum(enums.VerificationLevel, data) + +def _transform_default_notifications(entry, data): + return enums.try_enum(enums.NotificationLevel, data) + +def _transform_explicit_content_filter(entry, data): + return enums.try_enum(enums.ContentFilter, data) + +def _transform_permissions(entry, data): + return Permissions(data) + +def _transform_color(entry, data): + return Colour(data) + +def _transform_snowflake(entry, data): + return int(data) + +def _transform_channel(entry, data): + if data is None: + return None + channel = entry.guild.get_channel(int(data)) or Object(id=data) + return channel + +def _transform_owner_id(entry, data): + if data is None: + return None + return entry._get_member(int(data)) + +def _transform_inviter_id(entry, data): + if data is None: + return None + return entry._get_member(int(data)) + +def _transform_overwrites(entry, data): + overwrites = [] + for elem in data: + allow = Permissions(elem['allow']) + deny = Permissions(elem['deny']) + ow = PermissionOverwrite.from_pair(allow, deny) + + ow_type = elem['type'] + ow_id = int(elem['id']) + if ow_type == 'role': + target = entry.guild.get_role(ow_id) + else: + target = entry._get_member(ow_id) + + if target is None: + target = Object(id=ow_id) + + overwrites.append((target, ow)) + + return overwrites + +class AuditLogDiff: + def __len__(self): + return len(self.__dict__) + + def __iter__(self): + return iter(self.__dict__.items()) + + def __repr__(self): + values = ' '.join('%s=%r' % item for item in self.__dict__.items()) + return '' % values + +class AuditLogChanges: + TRANSFORMERS = { + 'verification_level': (None, _transform_verification_level), + 'explicit_content_filter': (None, _transform_explicit_content_filter), + 'allow': (None, _transform_permissions), + 'deny': (None, _transform_permissions), + 'permissions': (None, _transform_permissions), + 'id': (None, _transform_snowflake), + 'color': ('colour', _transform_color), + 'owner_id': ('owner', _transform_owner_id), + 'inviter_id': ('inviter', _transform_inviter_id), + 'channel_id': ('channel', _transform_channel), + 'afk_channel_id': ('afk_channel', _transform_channel), + 'system_channel_id': ('system_channel', _transform_channel), + 'widget_channel_id': ('widget_channel', _transform_channel), + 'permission_overwrites': ('overwrites', _transform_overwrites), + 'splash_hash': ('splash', None), + 'icon_hash': ('icon', None), + 'avatar_hash': ('avatar', None), + 'rate_limit_per_user': ('slowmode_delay', None), + 'default_message_notifications': ('default_notifications', _transform_default_notifications), + } + + def __init__(self, entry, data): + self.before = AuditLogDiff() + self.after = AuditLogDiff() + + for elem in data: + attr = elem['key'] + + # special cases for role add/remove + if attr == '$add': + self._handle_role(self.before, self.after, entry, elem['new_value']) + continue + elif attr == '$remove': + self._handle_role(self.after, self.before, entry, elem['new_value']) + continue + + transformer = self.TRANSFORMERS.get(attr) + if transformer: 
+ key, transformer = transformer + if key: + attr = key + + try: + before = elem['old_value'] + except KeyError: + before = None + else: + if transformer: + before = transformer(entry, before) + + setattr(self.before, attr, before) + + try: + after = elem['new_value'] + except KeyError: + after = None + else: + if transformer: + after = transformer(entry, after) + + setattr(self.after, attr, after) + + # add an alias + if hasattr(self.after, 'colour'): + self.after.color = self.after.colour + self.before.color = self.before.colour + + def __repr__(self): + return '' % (self.before, self.after) + + def _handle_role(self, first, second, entry, elem): + if not hasattr(first, 'roles'): + setattr(first, 'roles', []) + + data = [] + g = entry.guild + + for e in elem: + role_id = int(e['id']) + role = g.get_role(role_id) + + if role is None: + role = Object(id=role_id) + role.name = e['name'] + + data.append(role) + + setattr(second, 'roles', data) + +class AuditLogEntry: + r"""Represents an Audit Log entry. + + You retrieve these via :meth:`Guild.audit_logs`. + + Attributes + ----------- + action: :class:`AuditLogAction` + The action that was done. + user: :class:`abc.User` + The user who initiated this action. Usually a :class:`Member`\, unless gone + then it's a :class:`User`. + id: :class:`int` + The entry ID. + target: Any + The target that got changed. The exact type of this depends on + the action being done. + reason: Optional[:class:`str`] + The reason this action was done. + extra: Any + Extra information that this entry has that might be useful. + For most actions, this is ``None``. However in some cases it + contains extra information. See :class:`AuditLogAction` for + which actions have this field filled out. + """ + + def __init__(self, *, users, data, guild): + self._state = guild._state + self.guild = guild + self._users = users + self._from_data(data) + + def _from_data(self, data): + self.action = enums.try_enum(enums.AuditLogAction, data['action_type']) + self.id = int(data['id']) + + # this key is technically not usually present + self.reason = data.get('reason') + self.extra = data.get('options') + + if self.extra: + if self.action is enums.AuditLogAction.member_prune: + # member prune has two keys with useful information + self.extra = type('_AuditLogProxy', (), {k: int(v) for k, v in self.extra.items()})() + elif self.action is enums.AuditLogAction.message_delete: + channel_id = int(self.extra['channel_id']) + elems = { + 'count': int(self.extra['count']), + 'channel': self.guild.get_channel(channel_id) or Object(id=channel_id) + } + self.extra = type('_AuditLogProxy', (), elems)() + elif self.action.name.startswith('overwrite_'): + # the overwrite_ actions have a dict with some information + instance_id = int(self.extra['id']) + the_type = self.extra.get('type') + if the_type == 'member': + self.extra = self._get_member(instance_id) + else: + role = self.guild.get_role(instance_id) + if role is None: + role = Object(id=instance_id) + role.name = self.extra.get('role_name') + self.extra = role + + # this key is not present when the above is present, typically. 
+ # It's a list of { new_value: a, old_value: b, key: c } + # where new_value and old_value are not guaranteed to be there depending + # on the action type, so let's just fetch it for now and only turn it + # into meaningful data when requested + self._changes = data.get('changes', []) + + self.user = self._get_member(utils._get_as_snowflake(data, 'user_id')) + self._target_id = utils._get_as_snowflake(data, 'target_id') + + def _get_member(self, user_id): + return self.guild.get_member(user_id) or self._users.get(user_id) + + def __repr__(self): + return ''.format(self) + + @utils.cached_property + def created_at(self): + """:class:`datetime.datetime`: Returns the entry's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @utils.cached_property + def target(self): + try: + converter = getattr(self, '_convert_target_' + self.action.target_type) + except AttributeError: + return Object(id=self._target_id) + else: + return converter(self._target_id) + + @utils.cached_property + def category(self): + """Optional[:class:`AuditLogActionCategory`]: The category of the action, if applicable.""" + return self.action.category + + @utils.cached_property + def changes(self): + """:class:`AuditLogChanges`: The list of changes this entry has.""" + obj = AuditLogChanges(self, self._changes) + del self._changes + return obj + + @utils.cached_property + def before(self): + """:class:`AuditLogDiff`: The target's prior state.""" + return self.changes.before + + @utils.cached_property + def after(self): + """:class:`AuditLogDiff`: The target's subsequent state.""" + return self.changes.after + + def _convert_target_guild(self, target_id): + return self.guild + + def _convert_target_channel(self, target_id): + ch = self.guild.get_channel(target_id) + if ch is None: + return Object(id=target_id) + return ch + + def _convert_target_user(self, target_id): + return self._get_member(target_id) + + def _convert_target_role(self, target_id): + role = self.guild.get_role(target_id) + if role is None: + return Object(id=target_id) + return role + + def _convert_target_invite(self, target_id): + # invites have target_id set to null + # so figure out which change has the full invite data + changeset = self.before if self.action is enums.AuditLogAction.invite_delete else self.after + + fake_payload = { + 'max_age': changeset.max_age, + 'max_uses': changeset.max_uses, + 'code': changeset.code, + 'temporary': changeset.temporary, + 'channel': changeset.channel, + 'uses': changeset.uses, + 'guild': self.guild, + } + + obj = Invite(state=self._state, data=fake_payload) + try: + obj.inviter = changeset.inviter + except AttributeError: + pass + return obj + + def _convert_target_emoji(self, target_id): + return self._state.get_emoji(target_id) or Object(id=target_id) + + def _convert_target_message(self, target_id): + return self._get_member(target_id) diff --git a/venv/lib/python3.7/site-packages/discord/backoff.py b/venv/lib/python3.7/site-packages/discord/backoff.py new file mode 100644 index 0000000..3ebc019 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/backoff.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the 
Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import time +import random + +class ExponentialBackoff: + """An implementation of the exponential backoff algorithm + + Provides a convenient interface to implement an exponential backoff + for reconnecting or retrying transmissions in a distributed network. + + Once instantiated, the delay method will return the next interval to + wait for when retrying a connection or transmission. The maximum + delay increases exponentially with each retry up to a maximum of + 2^10 * base, and is reset if no more attempts are needed in a period + of 2^11 * base seconds. + + Parameters + ---------- + base: :class:`int` + The base delay in seconds. The first retry-delay will be up to + this many seconds. + integral: :class:`bool` + Set to ``True`` if whole periods of base is desirable, otherwise any + number in between may be returned. + """ + + def __init__(self, base=1, *, integral=False): + self._base = base + + self._exp = 0 + self._max = 10 + self._reset_time = base * 2 ** 11 + self._last_invocation = time.monotonic() + + # Use our own random instance to avoid messing with global one + rand = random.Random() + rand.seed() + + self._randfunc = rand.randrange if integral else rand.uniform + + def delay(self): + """Compute the next delay + + Returns the next delay to wait according to the exponential + backoff algorithm. This is a value between 0 and base * 2^exp + where exponent starts off at 1 and is incremented at every + invocation of this method up to a maximum of 10. + + If a period of more than base * 2^11 has passed since the last + retry, the exponent is reset to 1. 
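A minimal sketch of the backoff loop this class is designed for. `ExponentialBackoff` is an internal helper, so this is illustrative only; `keep_connected` and `connect_once` are hypothetical names for any coroutine that raises on failure.

    import asyncio
    from discord.backoff import ExponentialBackoff

    async def keep_connected(connect_once):
        backoff = ExponentialBackoff()
        while True:
            try:
                await connect_once()
                return
            except OSError:
                # Sleep between 0 and base * 2**exp seconds, as described above.
                await asyncio.sleep(backoff.delay())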
+ """ + invocation = time.monotonic() + interval = invocation - self._last_invocation + self._last_invocation = invocation + + if interval > self._reset_time: + self._exp = 0 + + self._exp = min(self._exp + 1, self._max) + return self._randfunc(0, self._base * 2 ** self._exp) diff --git a/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x64.dll b/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x64.dll new file mode 100644 index 0000000..2832418 Binary files /dev/null and b/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x64.dll differ diff --git a/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x86.dll b/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x86.dll new file mode 100644 index 0000000..b291dfc Binary files /dev/null and b/venv/lib/python3.7/site-packages/discord/bin/libopus-0.x86.dll differ diff --git a/venv/lib/python3.7/site-packages/discord/calls.py b/venv/lib/python3.7/site-packages/discord/calls.py new file mode 100644 index 0000000..8ab0414 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/calls.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import datetime + +from . import utils +from .enums import VoiceRegion, try_enum +from .member import VoiceState + +class CallMessage: + """Represents a group call message from Discord. + + This is only received in cases where the message type is equivalent to + :attr:`MessageType.call`. + + Attributes + ----------- + ended_timestamp: Optional[:class:`datetime.datetime`] + A naive UTC datetime object that represents the time that the call has ended. + participants: List[:class:`User`] + The list of users that are participating in this call. + message: :class:`Message` + The message associated with this call message. + """ + + def __init__(self, message, **kwargs): + self.message = message + self.ended_timestamp = utils.parse_time(kwargs.get('ended_timestamp')) + self.participants = kwargs.get('participants') + + @property + def call_ended(self): + """:class:`bool`: Indicates if the call has ended.""" + return self.ended_timestamp is not None + + @property + def channel(self): + r""":class:`GroupChannel`\: The private channel associated with this message.""" + return self.message.channel + + @property + def duration(self): + """Queries the duration of the call. + + If the call has not ended then the current duration will + be returned. 
+ + Returns + --------- + :class:`datetime.timedelta` + The timedelta object representing the duration. + """ + if self.ended_timestamp is None: + return datetime.datetime.utcnow() - self.message.created_at + else: + return self.ended_timestamp - self.message.created_at + +class GroupCall: + """Represents the actual group call from Discord. + + This is accompanied with a :class:`CallMessage` denoting the information. + + Attributes + ----------- + call: :class:`CallMessage` + The call message associated with this group call. + unavailable: :class:`bool` + Denotes if this group call is unavailable. + ringing: List[:class:`User`] + A list of users that are currently being rung to join the call. + region: :class:`VoiceRegion` + The guild region the group call is being hosted on. + """ + + def __init__(self, **kwargs): + self.call = kwargs.get('call') + self.unavailable = kwargs.get('unavailable') + self._voice_states = {} + + for state in kwargs.get('voice_states', []): + self._update_voice_state(state) + + self._update(**kwargs) + + def _update(self, **kwargs): + self.region = try_enum(VoiceRegion, kwargs.get('region')) + lookup = {u.id: u for u in self.call.channel.recipients} + me = self.call.channel.me + lookup[me.id] = me + self.ringing = list(filter(None, map(lookup.get, kwargs.get('ringing', [])))) + + def _update_voice_state(self, data): + user_id = int(data['user_id']) + # left the voice channel? + if data['channel_id'] is None: + self._voice_states.pop(user_id, None) + else: + self._voice_states[user_id] = VoiceState(data=data, channel=self.channel) + + @property + def connected(self): + """List[:class:`User`]: A property that returns all users that are currently in this call.""" + ret = [u for u in self.channel.recipients if self.voice_state_for(u) is not None] + me = self.channel.me + if self.voice_state_for(me) is not None: + ret.append(me) + + return ret + + @property + def channel(self): + r""":class:`GroupChannel`\: Returns the channel the group call is in.""" + return self.call.channel + + def voice_state_for(self, user): + """Retrieves the :class:`VoiceState` for a specified :class:`User`. + + If the :class:`User` has no voice state then this function returns + ``None``. + + Parameters + ------------ + user: :class:`User` + The user to retrieve the voice state for. + + Returns + -------- + Optional[:class:`VoiceState`] + The voice state associated with this user. + """ + + return self._voice_states.get(user.id) diff --git a/venv/lib/python3.7/site-packages/discord/channel.py b/venv/lib/python3.7/site-packages/discord/channel.py new file mode 100644 index 0000000..ae0eb4c --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/channel.py @@ -0,0 +1,1214 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import time +import asyncio + +import discord.abc +from .permissions import Permissions +from .enums import ChannelType, try_enum +from .mixins import Hashable +from . import utils +from .asset import Asset +from .errors import ClientException, NoMoreItems +from .webhook import Webhook + +__all__ = ( + 'TextChannel', + 'VoiceChannel', + 'DMChannel', + 'CategoryChannel', + 'StoreChannel', + 'GroupChannel', + '_channel_factory', +) + +async def _single_delete_strategy(messages): + for m in messages: + await m.delete() + +class TextChannel(discord.abc.Messageable, discord.abc.GuildChannel, Hashable): + """Represents a Discord guild text channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the channel's hash. + + .. describe:: str(x) + + Returns the channel's name. + + Attributes + ----------- + name: :class:`str` + The channel name. + guild: :class:`Guild` + The guild the channel belongs to. + id: :class:`int` + The channel ID. + category_id: :class:`int` + The category channel ID this channel belongs to. + topic: Optional[:class:`str`] + The channel's topic. None if it doesn't exist. + position: :class:`int` + The position in the channel list. This is a number that starts at 0. e.g. the + top channel is position 0. + last_message_id: Optional[:class:`int`] + The last message ID of the message sent to this channel. It may + *not* point to an existing or valid message. + slowmode_delay: :class:`int` + The number of seconds a member must wait between sending messages + in this channel. A value of `0` denotes that it is disabled. + Bots and users with :attr:`~Permissions.manage_channels` or + :attr:`~Permissions.manage_messages` bypass slowmode. + """ + + __slots__ = ('name', 'id', 'guild', 'topic', '_state', 'nsfw', + 'category_id', 'position', 'slowmode_delay', '_overwrites', + '_type', 'last_message_id') + + def __init__(self, *, state, guild, data): + self._state = state + self.id = int(data['id']) + self._type = data['type'] + self._update(guild, data) + + def __repr__(self): + attrs = [ + ('id', self.id), + ('name', self.name), + ('position', self.position), + ('nsfw', self.nsfw), + ('news', self.is_news()), + ('category_id', self.category_id) + ] + return '<%s %s>' % (self.__class__.__name__, ' '.join('%s=%r' % t for t in attrs)) + + def _update(self, guild, data): + self.guild = guild + self.name = data['name'] + self.category_id = utils._get_as_snowflake(data, 'parent_id') + self.topic = data.get('topic') + self.position = data['position'] + self.nsfw = data.get('nsfw', False) + # Does this need coercion into `int`? No idea yet. 
+ self.slowmode_delay = data.get('rate_limit_per_user', 0) + self._type = data.get('type', self._type) + self.last_message_id = utils._get_as_snowflake(data, 'last_message_id') + self._fill_overwrites(data) + + async def _get_channel(self): + return self + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return try_enum(ChannelType, self._type) + + @property + def _sorting_bucket(self): + return ChannelType.text.value + + def permissions_for(self, member): + base = super().permissions_for(member) + + # text channels do not have voice related permissions + denied = Permissions.voice() + base.value &= ~denied.value + return base + + permissions_for.__doc__ = discord.abc.GuildChannel.permissions_for.__doc__ + + @property + def members(self): + """List[:class:`Member`]: Returns all members that can see this channel.""" + return [m for m in self.guild.members if self.permissions_for(m).read_messages] + + def is_nsfw(self): + """Checks if the channel is NSFW.""" + return self.nsfw + + def is_news(self): + """Checks if the channel is a news channel.""" + return self._type == ChannelType.news.value + + @property + def last_message(self): + """Fetches the last message from this channel in cache. + + The message might not be valid or point to an existing message. + + .. admonition:: Reliable Fetching + :class: helpful + + For a slightly more reliable method of fetching the + last message, consider using either :meth:`history` + or :meth:`fetch_message` with the :attr:`last_message_id` + attribute. + + Returns + --------- + Optional[:class:`Message`] + The last message in this channel or ``None`` if not found. + """ + return self._state._get_message(self.last_message_id) if self.last_message_id else None + + async def edit(self, *, reason=None, **options): + """|coro| + + Edits the channel. + + You must have the :attr:`~Permissions.manage_channels` permission to + use this. + + Parameters + ---------- + name: :class:`str` + The new channel name. + topic: :class:`str` + The new channel's topic. + position: :class:`int` + The new channel's position. + nsfw: :class:`bool` + To mark the channel as NSFW or not. + sync_permissions: :class:`bool` + Whether to sync permissions with the channel's new or pre-existing + category. Defaults to ``False``. + category: Optional[:class:`CategoryChannel`] + The new category for this channel. Can be ``None`` to remove the + category. + slowmode_delay: :class:`int` + Specifies the slowmode rate limit for user in this channel, in seconds. + A value of `0` disables slowmode. The maximum value possible is `21600`. + reason: Optional[:class:`str`] + The reason for editing this channel. Shows up on the audit log. + + Raises + ------ + InvalidArgument + If position is less than 0 or greater than the number of channels. + Forbidden + You do not have permissions to edit the channel. + HTTPException + Editing the channel failed. + """ + await self._edit(options, reason=reason) + + async def clone(self, *, name=None, reason=None): + return await self._clone_impl({ + 'topic': self.topic, + 'nsfw': self.nsfw, + 'rate_limit_per_user': self.slowmode_delay + }, name=name, reason=reason) + + clone.__doc__ = discord.abc.GuildChannel.clone.__doc__ + + async def delete_messages(self, messages): + """|coro| + + Deletes a list of messages. This is similar to :meth:`Message.delete` + except it bulk deletes multiple messages. + + As a special case, if the number of messages is 0, then nothing + is done. 
If the number of messages is 1 then single message + delete is done. If it's more than two, then bulk delete is used. + + You cannot bulk delete more than 100 messages or messages that + are older than 14 days old. + + You must have the :attr:`~Permissions.manage_messages` permission to + use this. + + Usable only by bot accounts. + + Parameters + ----------- + messages: Iterable[:class:`abc.Snowflake`] + An iterable of messages denoting which ones to bulk delete. + + Raises + ------ + ClientException + The number of messages to delete was more than 100. + Forbidden + You do not have proper permissions to delete the messages or + you're not using a bot account. + HTTPException + Deleting the messages failed. + """ + if not isinstance(messages, (list, tuple)): + messages = list(messages) + + if len(messages) == 0: + return # do nothing + + if len(messages) == 1: + message_id = messages[0].id + await self._state.http.delete_message(self.id, message_id) + return + + if len(messages) > 100: + raise ClientException('Can only bulk delete messages up to 100 messages') + + message_ids = [m.id for m in messages] + await self._state.http.delete_messages(self.id, message_ids) + + async def purge(self, *, limit=100, check=None, before=None, after=None, around=None, oldest_first=False, bulk=True): + """|coro| + + Purges a list of messages that meet the criteria given by the predicate + ``check``. If a ``check`` is not provided then all messages are deleted + without discrimination. + + You must have the :attr:`~Permissions.manage_messages` permission to + delete messages even if they are your own (unless you are a user + account). The :attr:`~Permissions.read_message_history` permission is + also needed to retrieve message history. + + Internally, this employs a different number of strategies depending + on the conditions met such as if a bulk delete is possible or if + the account is a user bot or not. + + Examples + --------- + + Deleting bot's messages :: + + def is_me(m): + return m.author == client.user + + deleted = await channel.purge(limit=100, check=is_me) + await channel.send('Deleted {} message(s)'.format(len(deleted))) + + Parameters + ----------- + limit: Optional[:class:`int`] + The number of messages to search through. This is not the number + of messages that will be deleted, though it can be. + check: Callable[[:class:`Message`], :class:`bool`] + The function used to check if a message should be deleted. + It must take a :class:`Message` as its sole parameter. + before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Same as ``before`` in :meth:`history`. + after: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Same as ``after`` in :meth:`history`. + around: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Same as ``around`` in :meth:`history`. + oldest_first: Optional[:class:`bool`] + Same as ``oldest_first`` in :meth:`history`. + bulk: class:`bool` + If ``True``, use bulk delete. Setting this to ``False`` is useful for mass-deleting + a bot's own messages without :attr:`Permissions.manage_messages`. When ``True``, will + fall back to single delete if current account is a user bot, or if messages are + older than two weeks. + + Raises + ------- + Forbidden + You do not have proper permissions to do the actions required. + HTTPException + Purging the messages failed. + + Returns + -------- + List[:class:`.Message`] + The list of messages that were deleted. 
+ """ + + if check is None: + check = lambda m: True + + iterator = self.history(limit=limit, before=before, after=after, oldest_first=oldest_first, around=around) + ret = [] + count = 0 + + minimum_time = int((time.time() - 14 * 24 * 60 * 60) * 1000.0 - 1420070400000) << 22 + strategy = self.delete_messages if self._state.is_bot and bulk else _single_delete_strategy + + while True: + try: + msg = await iterator.next() + except NoMoreItems: + # no more messages to poll + if count >= 2: + # more than 2 messages -> bulk delete + to_delete = ret[-count:] + await strategy(to_delete) + elif count == 1: + # delete a single message + await ret[-1].delete() + + return ret + else: + if count == 100: + # we've reached a full 'queue' + to_delete = ret[-100:] + await strategy(to_delete) + count = 0 + await asyncio.sleep(1) + + if check(msg): + if msg.id < minimum_time: + # older than 14 days old + if count == 1: + await ret[-1].delete() + elif count >= 2: + to_delete = ret[-count:] + await strategy(to_delete) + + count = 0 + strategy = _single_delete_strategy + + count += 1 + ret.append(msg) + + async def webhooks(self): + """|coro| + + Gets the list of webhooks from this channel. + + Requires :attr:`~.Permissions.manage_webhooks` permissions. + + Raises + ------- + Forbidden + You don't have permissions to get the webhooks. + + Returns + -------- + List[:class:`Webhook`] + The webhooks for this channel. + """ + + data = await self._state.http.channel_webhooks(self.id) + return [Webhook.from_state(d, state=self._state) for d in data] + + async def create_webhook(self, *, name, avatar=None, reason=None): + """|coro| + + Creates a webhook for this channel. + + Requires :attr:`~.Permissions.manage_webhooks` permissions. + + .. versionchanged:: 1.1.0 + Added the ``reason`` keyword-only parameter. + + Parameters + ------------- + name: :class:`str` + The webhook's name. + avatar: Optional[:class:`bytes`] + A :term:`py:bytes-like object` representing the webhook's default avatar. + This operates similarly to :meth:`~ClientUser.edit`. + reason: Optional[:class:`str`] + The reason for creating this webhook. Shows up in the audit logs. + + Raises + ------- + HTTPException + Creating the webhook failed. + Forbidden + You do not have permissions to create a webhook. + + Returns + -------- + :class:`Webhook` + The created webhook. + """ + + if avatar is not None: + avatar = utils._bytes_to_base64_data(avatar) + + data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar, reason=reason) + return Webhook.from_state(data, state=self._state) + +class VoiceChannel(discord.abc.Connectable, discord.abc.GuildChannel, Hashable): + """Represents a Discord guild voice channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the channel's hash. + + .. describe:: str(x) + + Returns the channel's name. + + Attributes + ----------- + name: :class:`str` + The channel name. + guild: :class:`Guild` + The guild the channel belongs to. + id: :class:`int` + The channel ID. + category_id: :class:`int` + The category channel ID this channel belongs to. + position: :class:`int` + The position in the channel list. This is a number that starts at 0. e.g. the + top channel is position 0. + bitrate: :class:`int` + The channel's preferred audio bitrate in bits per second. 
+ user_limit: :class:`int` + The channel's limit for number of members that can be in a voice channel. + """ + + __slots__ = ('name', 'id', 'guild', 'bitrate', 'user_limit', + '_state', 'position', '_overwrites', 'category_id') + + def __init__(self, *, state, guild, data): + self._state = state + self.id = int(data['id']) + self._update(guild, data) + + def __repr__(self): + attrs = [ + ('id', self.id), + ('name', self.name), + ('position', self.position), + ('bitrate', self.bitrate), + ('user_limit', self.user_limit), + ('category_id', self.category_id) + ] + return '<%s %s>' % (self.__class__.__name__, ' '.join('%s=%r' % t for t in attrs)) + + def _get_voice_client_key(self): + return self.guild.id, 'guild_id' + + def _get_voice_state_pair(self): + return self.guild.id, self.id + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return ChannelType.voice + + def _update(self, guild, data): + self.guild = guild + self.name = data['name'] + self.category_id = utils._get_as_snowflake(data, 'parent_id') + self.position = data['position'] + self.bitrate = data.get('bitrate') + self.user_limit = data.get('user_limit') + self._fill_overwrites(data) + + @property + def _sorting_bucket(self): + return ChannelType.voice.value + + @property + def members(self): + """List[:class:`Member`]: Returns all members that are currently inside this voice channel.""" + ret = [] + for user_id, state in self.guild._voice_states.items(): + if state.channel.id == self.id: + member = self.guild.get_member(user_id) + if member is not None: + ret.append(member) + return ret + + def permissions_for(self, member): + base = super().permissions_for(member) + + # voice channels cannot be edited by people who can't connect to them + # It also implicitly denies all other voice perms + if not base.connect: + denied = Permissions.voice() + denied.update(manage_channels=True, manage_roles=True) + base.value &= ~denied.value + return base + + permissions_for.__doc__ = discord.abc.GuildChannel.permissions_for.__doc__ + + async def clone(self, *, name=None, reason=None): + return await self._clone_impl({ + 'bitrate': self.bitrate, + 'user_limit': self.user_limit + }, name=name, reason=reason) + + clone.__doc__ = discord.abc.GuildChannel.clone.__doc__ + + async def edit(self, *, reason=None, **options): + """|coro| + + Edits the channel. + + You must have the :attr:`~Permissions.manage_channels` permission to + use this. + + Parameters + ---------- + name: :class:`str` + The new channel's name. + bitrate: :class:`int` + The new channel's bitrate. + user_limit: :class:`int` + The new channel's user limit. + position: :class:`int` + The new channel's position. + sync_permissions: :class:`bool` + Whether to sync permissions with the channel's new or pre-existing + category. Defaults to ``False``. + category: Optional[:class:`CategoryChannel`] + The new category for this channel. Can be ``None`` to remove the + category. + reason: Optional[:class:`str`] + The reason for editing this channel. Shows up on the audit log. + + Raises + ------ + Forbidden + You do not have permissions to edit the channel. + HTTPException + Editing the channel failed. + """ + + await self._edit(options, reason=reason) + +class CategoryChannel(discord.abc.GuildChannel, Hashable): + """Represents a Discord channel category. + + These are useful to group channels to logical compartments. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. 
describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the category's hash. + + .. describe:: str(x) + + Returns the category's name. + + Attributes + ----------- + name: :class:`str` + The category name. + guild: :class:`Guild` + The guild the category belongs to. + id: :class:`int` + The category channel ID. + position: :class:`int` + The position in the category list. This is a number that starts at 0. e.g. the + top category is position 0. + """ + + __slots__ = ('name', 'id', 'guild', 'nsfw', '_state', 'position', '_overwrites', 'category_id') + + def __init__(self, *, state, guild, data): + self._state = state + self.id = int(data['id']) + self._update(guild, data) + + def __repr__(self): + return '<CategoryChannel id={0.id} name={0.name!r} position={0.position}>'.format(self) + + def _update(self, guild, data): + self.guild = guild + self.name = data['name'] + self.category_id = utils._get_as_snowflake(data, 'parent_id') + self.nsfw = data.get('nsfw', False) + self.position = data['position'] + self._fill_overwrites(data) + + @property + def _sorting_bucket(self): + return ChannelType.category.value + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return ChannelType.category + + def is_nsfw(self): + """Checks if the category is NSFW.""" + return self.nsfw + + async def clone(self, *, name=None, reason=None): + return await self._clone_impl({ + 'nsfw': self.nsfw + }, name=name, reason=reason) + + clone.__doc__ = discord.abc.GuildChannel.clone.__doc__ + + async def edit(self, *, reason=None, **options): + """|coro| + + Edits the channel. + + You must have the :attr:`~Permissions.manage_channels` permission to + use this. + + Parameters + ---------- + name: :class:`str` + The new category's name. + position: :class:`int` + The new category's position. + nsfw: :class:`bool` + To mark the category as NSFW or not. + reason: Optional[:class:`str`] + The reason for editing this category. Shows up on the audit log. + + Raises + ------ + InvalidArgument + If position is less than 0 or greater than the number of categories. + Forbidden + You do not have permissions to edit the category. + HTTPException + Editing the category failed. + """ + + try: + position = options.pop('position') + except KeyError: + pass + else: + await self._move(position, reason=reason) + self.position = position + + if options: + data = await self._state.http.edit_channel(self.id, reason=reason, **options) + self._update(self.guild, data) + + @property + def channels(self): + """List[:class:`abc.GuildChannel`]: Returns the channels that are under this category. + + These are sorted by the official Discord UI, which places voice channels below the text channels.
+ """ + def comparator(channel): + return (not isinstance(channel, TextChannel), channel.position) + + ret = [c for c in self.guild.channels if c.category_id == self.id] + ret.sort(key=comparator) + return ret + + @property + def text_channels(self): + """List[:class:`TextChannel`]: Returns the text channels that are under this category.""" + ret = [c for c in self.guild.channels + if c.category_id == self.id + and isinstance(c, TextChannel)] + ret.sort(key=lambda c: (c.position, c.id)) + return ret + + @property + def voice_channels(self): + """List[:class:`VoiceChannel`]: Returns the voice channels that are under this category.""" + ret = [c for c in self.guild.channels + if c.category_id == self.id + and isinstance(c, VoiceChannel)] + ret.sort(key=lambda c: (c.position, c.id)) + return ret + + async def create_text_channel(self, name, *, overwrites=None, reason=None, **options): + """|coro| + + A shortcut method to :meth:`Guild.create_text_channel` to create a :class:`TextChannel` in the category. + """ + return await self.guild.create_text_channel(name, overwrites=overwrites, category=self, reason=reason, **options) + + async def create_voice_channel(self, name, *, overwrites=None, reason=None, **options): + """|coro| + + A shortcut method to :meth:`Guild.create_voice_channel` to create a :class:`VoiceChannel` in the category. + """ + return await self.guild.create_voice_channel(name, overwrites=overwrites, category=self, reason=reason, **options) + +class StoreChannel(discord.abc.GuildChannel, Hashable): + """Represents a Discord guild store channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the channel's hash. + + .. describe:: str(x) + + Returns the channel's name. + + Attributes + ----------- + name: :class:`str` + The channel name. + guild: :class:`Guild` + The guild the channel belongs to. + id: :class:`int` + The channel ID. + category_id: :class:`int` + The category channel ID this channel belongs to. + position: :class:`int` + The position in the channel list. This is a number that starts at 0. e.g. the + top channel is position 0. 
+ """ + __slots__ = ('name', 'id', 'guild', '_state', 'nsfw', + 'category_id', 'position', '_overwrites',) + + def __init__(self, *, state, guild, data): + self._state = state + self.id = int(data['id']) + self._update(guild, data) + + def __repr__(self): + return '<StoreChannel id={0.id} name={0.name!r} position={0.position}>'.format(self) + + def _update(self, guild, data): + self.guild = guild + self.name = data['name'] + self.category_id = utils._get_as_snowflake(data, 'parent_id') + self.position = data['position'] + self.nsfw = data.get('nsfw', False) + self._fill_overwrites(data) + + @property + def _sorting_bucket(self): + return ChannelType.text.value + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return ChannelType.store + + def permissions_for(self, member): + base = super().permissions_for(member) + + # store channels do not have voice related permissions + denied = Permissions.voice() + base.value &= ~denied.value + return base + + permissions_for.__doc__ = discord.abc.GuildChannel.permissions_for.__doc__ + + def is_nsfw(self): + """Checks if the channel is NSFW.""" + return self.nsfw + + async def clone(self, *, name=None, reason=None): + return await self._clone_impl({ + 'nsfw': self.nsfw + }, name=name, reason=reason) + + clone.__doc__ = discord.abc.GuildChannel.clone.__doc__ + + async def edit(self, *, reason=None, **options): + """|coro| + + Edits the channel. + + You must have the :attr:`~Permissions.manage_channels` permission to + use this. + + Parameters + ---------- + name: :class:`str` + The new channel name. + position: :class:`int` + The new channel's position. + nsfw: :class:`bool` + To mark the channel as NSFW or not. + sync_permissions: :class:`bool` + Whether to sync permissions with the channel's new or pre-existing + category. Defaults to ``False``. + category: Optional[:class:`CategoryChannel`] + The new category for this channel. Can be ``None`` to remove the + category. + reason: Optional[:class:`str`] + The reason for editing this channel. Shows up on the audit log. + + Raises + ------ + InvalidArgument + If position is less than 0 or greater than the number of channels. + Forbidden + You do not have permissions to edit the channel. + HTTPException + Editing the channel failed. + """ + await self._edit(options, reason=reason) + +class DMChannel(discord.abc.Messageable, Hashable): + """Represents a Discord direct message channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the channel's hash. + + .. describe:: str(x) + + Returns a string representation of the channel + + Attributes + ---------- + recipient: :class:`User` + The user you are participating with in the direct message channel. + me: :class:`ClientUser` + The user presenting yourself. + id: :class:`int` + The direct message channel ID.
+ """ + + __slots__ = ('id', 'recipient', 'me', '_state') + + def __init__(self, *, me, state, data): + self._state = state + self.recipient = state.store_user(data['recipients'][0]) + self.me = me + self.id = int(data['id']) + + async def _get_channel(self): + return self + + def __str__(self): + return 'Direct Message with %s' % self.recipient + + def __repr__(self): + return '<DMChannel id={0.id} recipient={0.recipient!r}>'.format(self) + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return ChannelType.private + + @property + def created_at(self): + """Returns the direct message channel's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def permissions_for(self, user=None): + """Handles permission resolution for a :class:`User`. + + This function is there for compatibility with other channel types. + + Actual direct messages do not really have the concept of permissions. + + This returns all the Text related permissions set to ``True`` except: + + - :attr:`~Permissions.send_tts_messages`: You cannot send TTS messages in a DM. + - :attr:`~Permissions.manage_messages`: You cannot delete others messages in a DM. + + Parameters + ----------- + user: :class:`User` + The user to check permissions for. This parameter is ignored + but kept for compatibility. + + Returns + -------- + :class:`Permissions` + The resolved permissions. + """ + + base = Permissions.text() + base.send_tts_messages = False + base.manage_messages = False + return base + +class GroupChannel(discord.abc.Messageable, Hashable): + """Represents a Discord group channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two channels are equal. + + .. describe:: x != y + + Checks if two channels are not equal. + + .. describe:: hash(x) + + Returns the channel's hash. + + .. describe:: str(x) + + Returns a string representation of the channel + + Attributes + ---------- + recipients: List[:class:`User`] + The users you are participating with in the group channel. + me: :class:`ClientUser` + The user presenting yourself. + id: :class:`int` + The group channel ID. + owner: :class:`User` + The user that owns the group channel. + icon: Optional[:class:`str`] + The group channel's icon hash if provided. + name: Optional[:class:`str`] + The group channel's name if provided.
+ """ + + __slots__ = ('id', 'recipients', 'owner', 'icon', 'name', 'me', '_state') + + def __init__(self, *, me, state, data): + self._state = state + self.id = int(data['id']) + self.me = me + self._update_group(data) + + def _update_group(self, data): + owner_id = utils._get_as_snowflake(data, 'owner_id') + self.icon = data.get('icon') + self.name = data.get('name') + + try: + self.recipients = [self._state.store_user(u) for u in data['recipients']] + except KeyError: + pass + + if owner_id == self.me.id: + self.owner = self.me + else: + self.owner = utils.find(lambda u: u.id == owner_id, self.recipients) + + async def _get_channel(self): + return self + + def __str__(self): + if self.name: + return self.name + + if len(self.recipients) == 0: + return 'Unnamed' + + return ', '.join(map(lambda x: x.name, self.recipients)) + + def __repr__(self): + return '<GroupChannel id={0.id} name={0.name!r}>'.format(self) + + @property + def type(self): + """:class:`ChannelType`: The channel's Discord type.""" + return ChannelType.group + + @property + def icon_url(self): + """:class:`Asset`: Returns the channel's icon asset if available.""" + return Asset._from_icon(self._state, self, 'channel') + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def permissions_for(self, user): + """Handles permission resolution for a :class:`User`. + + This function is there for compatibility with other channel types. + + Actual direct messages do not really have the concept of permissions. + + This returns all the Text related permissions set to ``True`` except: + + - send_tts_messages: You cannot send TTS messages in a DM. + - manage_messages: You cannot delete others messages in a DM. + + This also checks the kick_members permission if the user is the owner. + + Parameters + ----------- + user: :class:`User` + The user to check permissions for. + + Returns + -------- + :class:`Permissions` + The resolved permissions for the user. + """ + + base = Permissions.text() + base.send_tts_messages = False + base.manage_messages = False + base.mention_everyone = True + + if user.id == self.owner.id: + base.kick_members = True + + return base + + async def add_recipients(self, *recipients): + r"""|coro| + + Adds recipients to this group. + + A group can only have a maximum of 10 members. + Attempting to add more ends up in an exception. To + add a recipient to the group, you must have a relationship + with the user of type :attr:`RelationshipType.friend`. + + Parameters + ----------- + \*recipients: :class:`User` + An argument list of users to add to this group. + + Raises + ------- + HTTPException + Adding a recipient to this group failed. + """ + + # TODO: wait for the corresponding WS event + + req = self._state.http.add_group_recipient + for recipient in recipients: + await req(self.id, recipient.id) + + async def remove_recipients(self, *recipients): + r"""|coro| + + Removes recipients from this group. + + Parameters + ----------- + \*recipients: :class:`User` + An argument list of users to remove from this group. + + Raises + ------- + HTTPException + Removing a recipient from this group failed. + """ + + # TODO: wait for the corresponding WS event + + req = self._state.http.remove_group_recipient + for recipient in recipients: + await req(self.id, recipient.id) + + async def edit(self, **fields): + """|coro| + + Edits the group. + + Parameters + ----------- + name: Optional[:class:`str`] + The new name to change the group to.
+ Could be ``None`` to remove the name. + icon: Optional[:class:`bytes`] + A :term:`py:bytes-like object` representing the new icon. + Could be ``None`` to remove the icon. + + Raises + ------- + HTTPException + Editing the group failed. + """ + + try: + icon_bytes = fields['icon'] + except KeyError: + pass + else: + if icon_bytes is not None: + fields['icon'] = utils._bytes_to_base64_data(icon_bytes) + + data = await self._state.http.edit_group(self.id, **fields) + self._update_group(data) + + async def leave(self): + """|coro| + + Leave the group. + + If you are the only one in the group, this deletes it as well. + + Raises + ------- + HTTPException + Leaving the group failed. + """ + + await self._state.http.leave_group(self.id) + +def _channel_factory(channel_type): + value = try_enum(ChannelType, channel_type) + if value is ChannelType.text: + return TextChannel, value + elif value is ChannelType.voice: + return VoiceChannel, value + elif value is ChannelType.private: + return DMChannel, value + elif value is ChannelType.category: + return CategoryChannel, value + elif value is ChannelType.group: + return GroupChannel, value + elif value is ChannelType.news: + return TextChannel, value + elif value is ChannelType.store: + return StoreChannel, value + else: + return None, value diff --git a/venv/lib/python3.7/site-packages/discord/client.py b/venv/lib/python3.7/site-packages/discord/client.py new file mode 100644 index 0000000..e480459 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/client.py @@ -0,0 +1,1266 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +from collections import namedtuple +import logging +import signal +import sys +import traceback + +import aiohttp +import websockets + +from .user import User, Profile +from .asset import Asset +from .invite import Invite +from .widget import Widget +from .guild import Guild +from .channel import _channel_factory +from .enums import ChannelType +from .member import Member +from .errors import * +from .enums import Status, VoiceRegion +from .gateway import * +from .activity import _ActivityTag, create_activity +from .voice_client import VoiceClient +from .http import HTTPClient +from .state import ConnectionState +from . 
import utils +from .object import Object +from .backoff import ExponentialBackoff +from .webhook import Webhook +from .iterators import GuildIterator +from .appinfo import AppInfo + +log = logging.getLogger(__name__) + +def _cancel_tasks(loop): + try: + task_retriever = asyncio.Task.all_tasks + except AttributeError: + # future proofing for 3.9 I guess + task_retriever = asyncio.all_tasks + + tasks = {t for t in task_retriever(loop=loop) if not t.done()} + + if not tasks: + return + + log.info('Cleaning up after %d tasks.', len(tasks)) + for task in tasks: + task.cancel() + + loop.run_until_complete(asyncio.gather(*tasks, loop=loop, return_exceptions=True)) + log.info('All tasks finished cancelling.') + + for task in tasks: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler({ + 'message': 'Unhandled exception during Client.run shutdown.', + 'exception': task.exception(), + 'task': task + }) + +def _cleanup_loop(loop): + try: + _cancel_tasks(loop) + if sys.version_info >= (3, 6): + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + log.info('Closing the event loop.') + loop.close() + +class _ClientEventTask(asyncio.Task): + def __init__(self, original_coro, event_name, coro, *, loop): + super().__init__(coro, loop=loop) + self.__event_name = event_name + self.__original_coro = original_coro + + def __repr__(self): + info = [ + ('state', self._state.lower()), + ('event', self.__event_name), + ('coro', repr(self.__original_coro)), + ] + if self._exception is not None: + info.append(('exception', repr(self._exception))) + return '<ClientEventTask {}>'.format(' '.join('%s=%s' % t for t in info)) + +class Client: + r"""Represents a client connection that connects to Discord. + This class is used to interact with the Discord WebSocket and API. + + A number of options can be passed to the :class:`Client`. + + Parameters + ----------- + max_messages: Optional[:class:`int`] + The maximum number of messages to store in the internal message cache. + This defaults to 5000. Passing in ``None`` or a value less than 100 + will use the default instead of the passed in value. + loop: Optional[:class:`asyncio.AbstractEventLoop`] + The :class:`asyncio.AbstractEventLoop` to use for asynchronous operations. + Defaults to ``None``, in which case the default event loop is used via + :func:`asyncio.get_event_loop()`. + connector: :class:`aiohttp.BaseConnector` + The connector to use for connection pooling. + proxy: Optional[:class:`str`] + Proxy URL. + proxy_auth: Optional[:class:`aiohttp.BasicAuth`] + An object that represents proxy HTTP Basic Authorization. + shard_id: Optional[:class:`int`] + Integer starting at 0 and less than :attr:`.shard_count`. + shard_count: Optional[:class:`int`] + The total number of shards. + fetch_offline_members: :class:`bool` + Indicates if :func:`.on_ready` should be delayed to fetch all offline + members from the guilds the bot belongs to. If this is ``False``\, then + no offline members are received and :meth:`request_offline_members` + must be used to fetch the offline members of the guild. + status: Optional[:class:`.Status`] + A status to start your presence with upon logging on to Discord. + activity: Optional[Union[:class:`.Activity`, :class:`.Game`, :class:`.Streaming`]] + An activity to start your presence with upon logging on to Discord. + heartbeat_timeout: :class:`float` + The maximum numbers of seconds before timing out and restarting the + WebSocket in the case of not receiving a HEARTBEAT_ACK.
Useful if + processing the initial packets take too long to the point of disconnecting + you. The default timeout is 60 seconds. + + Attributes + ----------- + ws + The websocket gateway the client is currently connected to. Could be ``None``. + loop: :class:`asyncio.AbstractEventLoop` + The event loop that the client uses for HTTP requests and websocket operations. + """ + def __init__(self, *, loop=None, **options): + self.ws = None + self.loop = asyncio.get_event_loop() if loop is None else loop + self._listeners = {} + self.shard_id = options.get('shard_id') + self.shard_count = options.get('shard_count') + + connector = options.pop('connector', None) + proxy = options.pop('proxy', None) + proxy_auth = options.pop('proxy_auth', None) + self.http = HTTPClient(connector, proxy=proxy, proxy_auth=proxy_auth, loop=self.loop) + + self._handlers = { + 'ready': self._handle_ready + } + + self._connection = ConnectionState(dispatch=self.dispatch, chunker=self._chunker, handlers=self._handlers, + syncer=self._syncer, http=self.http, loop=self.loop, **options) + + self._connection.shard_count = self.shard_count + self._closed = False + self._ready = asyncio.Event(loop=self.loop) + self._connection._get_websocket = lambda g: self.ws + + if VoiceClient.warn_nacl: + VoiceClient.warn_nacl = False + log.warning("PyNaCl is not installed, voice will NOT be supported") + + # internals + + async def _syncer(self, guilds): + await self.ws.request_sync(guilds) + + async def _chunker(self, guild): + try: + guild_id = guild.id + except AttributeError: + guild_id = [s.id for s in guild] + + payload = { + 'op': 8, + 'd': { + 'guild_id': guild_id, + 'query': '', + 'limit': 0 + } + } + + await self.ws.send_as_json(payload) + + def _handle_ready(self): + self._ready.set() + + @property + def latency(self): + """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds. + + This could be referred to as the Discord WebSocket protocol latency. + """ + ws = self.ws + return float('nan') if not ws else ws.latency + + @property + def user(self): + """Optional[:class:`.ClientUser`]: Represents the connected client. None if not logged in.""" + return self._connection.user + + @property + def guilds(self): + """List[:class:`.Guild`]: The guilds that the connected client is a member of.""" + return self._connection.guilds + + @property + def emojis(self): + """List[:class:`.Emoji`]: The emojis that the connected client has.""" + return self._connection.emojis + + @property + def cached_messages(self): + """Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached. + + .. versionadded:: 1.1.0 + """ + return utils.SequenceProxy(self._connection._messages) + + @property + def private_channels(self): + """List[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on. + + .. note:: + + This returns only up to 128 most recent private channels due to an internal working + on how Discord deals with private channels. 
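The latency, user, and guilds properties above are plain attribute reads once the client is connected. A minimal sketch of inspecting them from an on_ready handler, assuming a discord.Client instance and the event decorator defined further down in this file (the printed values are whatever the connected client reports):

import discord

client = discord.Client()

@client.event
async def on_ready():
    # Uses only properties documented in this hunk: user, guilds, latency.
    print('Logged in as', client.user)
    print('Guilds cached:', len(client.guilds))
    print('Gateway latency: %.3fs' % client.latency)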
+ """ + return self._connection.private_channels + + @property + def voice_clients(self): + """List[:class:`.VoiceClient`]: Represents a list of voice connections.""" + return self._connection.voice_clients + + def is_ready(self): + """Specifies if the client's internal cache is ready for use.""" + return self._ready.is_set() + + async def _run_event(self, coro, event_name, *args, **kwargs): + try: + await coro(*args, **kwargs) + except asyncio.CancelledError: + pass + except Exception: + try: + await self.on_error(event_name, *args, **kwargs) + except asyncio.CancelledError: + pass + + def _schedule_event(self, coro, event_name, *args, **kwargs): + wrapped = self._run_event(coro, event_name, *args, **kwargs) + # Schedules the task + return _ClientEventTask(original_coro=coro, event_name=event_name, coro=wrapped, loop=self.loop) + + def dispatch(self, event, *args, **kwargs): + log.debug('Dispatching event %s', event) + method = 'on_' + event + + listeners = self._listeners.get(event) + if listeners: + removed = [] + for i, (future, condition) in enumerate(listeners): + if future.cancelled(): + removed.append(i) + continue + + try: + result = condition(*args) + except Exception as exc: + future.set_exception(exc) + removed.append(i) + else: + if result: + if len(args) == 0: + future.set_result(None) + elif len(args) == 1: + future.set_result(args[0]) + else: + future.set_result(args) + removed.append(i) + + if len(removed) == len(listeners): + self._listeners.pop(event) + else: + for idx in reversed(removed): + del listeners[idx] + + try: + coro = getattr(self, method) + except AttributeError: + pass + else: + self._schedule_event(coro, method, *args, **kwargs) + + async def on_error(self, event_method, *args, **kwargs): + """|coro| + + The default error handler provided by the client. + + By default this prints to :data:`sys.stderr` however it could be + overridden to have a different implementation. + Check :func:`~discord.on_error` for more details. + """ + print('Ignoring exception in {}'.format(event_method), file=sys.stderr) + traceback.print_exc() + + async def request_offline_members(self, *guilds): + r"""|coro| + + Requests previously offline members from the guild to be filled up + into the :attr:`.Guild.members` cache. This function is usually not + called. It should only be used if you have the ``fetch_offline_members`` + parameter set to ``False``. + + When the client logs on and connects to the websocket, Discord does + not provide the library with offline members if the number of members + in the guild is larger than 250. You can check if a guild is large + if :attr:`.Guild.large` is ``True``. + + Parameters + ----------- + \*guilds: :class:`.Guild` + An argument list of guilds to request offline members for. + + Raises + ------- + :exc:`.InvalidArgument` + If any guild is unavailable or not large in the collection. + """ + if any(not g.large or g.unavailable for g in guilds): + raise InvalidArgument('An unavailable or non-large guild was passed.') + + await self._connection.request_offline_members(guilds) + + # login state management + + async def login(self, token, *, bot=True): + """|coro| + + Logs in the client with the specified credentials. + + This function can be used in two different ways. + + .. warning:: + + Logging on with a user token is against the Discord + `Terms of Service `_ + and doing so might potentially get your account banned. + Use this at your own risk. + + Parameters + ----------- + token: :class:`str` + The authentication token. 
Do not prefix this token with + anything as the library will do it for you. + bot: :class:`bool` + Keyword argument that specifies if the account logging on is a bot + token or not. + + Raises + ------ + :exc:`.LoginFailure` + The wrong credentials are passed. + :exc:`.HTTPException` + An unknown HTTP related error occurred, + usually when it isn't 200 or the known incorrect credentials + passing status code. + """ + + log.info('logging in using static token') + await self.http.static_login(token, bot=bot) + self._connection.is_bot = bot + + async def logout(self): + """|coro| + + Logs out of Discord and closes all connections. + + .. note:: + + This is just an alias to :meth:`close`. If you want + to do extraneous cleanup when subclassing, it is suggested + to override :meth:`close` instead. + """ + await self.close() + + async def _connect(self): + coro = DiscordWebSocket.from_client(self, shard_id=self.shard_id) + self.ws = await asyncio.wait_for(coro, timeout=180.0, loop=self.loop) + while True: + try: + await self.ws.poll_event() + except ResumeWebSocket: + log.info('Got a request to RESUME the websocket.') + self.dispatch('disconnect') + coro = DiscordWebSocket.from_client(self, shard_id=self.shard_id, session=self.ws.session_id, + sequence=self.ws.sequence, resume=True) + self.ws = await asyncio.wait_for(coro, timeout=180.0, loop=self.loop) + + async def connect(self, *, reconnect=True): + """|coro| + + Creates a websocket connection and lets the websocket listen + to messages from Discord. This is a loop that runs the entire + event system and miscellaneous aspects of the library. Control + is not resumed until the WebSocket connection is terminated. + + Parameters + ----------- + reconnect: :class:`bool` + If we should attempt reconnecting, either due to internet + failure or a specific failure on Discord's part. Certain + disconnects that lead to bad state will not be handled (such as + invalid sharding payloads or bad tokens). + + Raises + ------- + :exc:`.GatewayNotFound` + If the gateway to connect to Discord is not found. Usually if this + is thrown then there is a Discord API outage. + :exc:`.ConnectionClosed` + The websocket connection has been terminated. + """ + + backoff = ExponentialBackoff() + while not self.is_closed(): + try: + await self._connect() + except (OSError, + HTTPException, + GatewayNotFound, + ConnectionClosed, + aiohttp.ClientError, + asyncio.TimeoutError, + websockets.InvalidHandshake, + websockets.WebSocketProtocolError) as exc: + + self.dispatch('disconnect') + if not reconnect: + await self.close() + if isinstance(exc, ConnectionClosed) and exc.code == 1000: + # clean close, don't re-raise this + return + raise + + if self.is_closed(): + return + + # We should only get this when an unhandled close code happens, + # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc) + # sometimes, discord sends us 1000 for unknown reasons so we should reconnect + # regardless and rely on is_closed instead + if isinstance(exc, ConnectionClosed): + if exc.code != 1000: + await self.close() + raise + + retry = backoff.delay() + log.exception("Attempting a reconnect in %.2fs", retry) + await asyncio.sleep(retry, loop=self.loop) + + async def close(self): + """|coro| + + Closes the connection to Discord. + """ + if self._closed: + return + + await self.http.close() + self._closed = True + + for voice in self.voice_clients: + try: + await voice.disconnect() + except Exception: + # if an error happens during disconnects, disregard it. 
+ pass + + if self.ws is not None and self.ws.open: + await self.ws.close() + + self._ready.clear() + + def clear(self): + """Clears the internal state of the bot. + + After this, the bot can be considered "re-opened", i.e. :meth:`is_closed` + and :meth:`is_ready` both return ``False`` along with the bot's internal + cache cleared. + """ + self._closed = False + self._ready.clear() + self._connection.clear() + self.http.recreate() + + async def start(self, *args, **kwargs): + """|coro| + + A shorthand coroutine for :meth:`login` + :meth:`connect`. + + Raises + ------- + TypeError + An unexpected keyword argument was received. + """ + bot = kwargs.pop('bot', True) + reconnect = kwargs.pop('reconnect', True) + + if kwargs: + raise TypeError("unexpected keyword argument(s) %s" % list(kwargs.keys())) + + await self.login(*args, bot=bot) + await self.connect(reconnect=reconnect) + + def run(self, *args, **kwargs): + """A blocking call that abstracts away the event loop + initialisation from you. + + If you want more control over the event loop then this + function should not be used. Use :meth:`start` coroutine + or :meth:`connect` + :meth:`login`. + + Roughly Equivalent to: :: + + try: + loop.run_until_complete(start(*args, **kwargs)) + except KeyboardInterrupt: + loop.run_until_complete(logout()) + # cancel all tasks lingering + finally: + loop.close() + + .. warning:: + + This function must be the last function to call due to the fact that it + is blocking. That means that registration of events or anything being + called after this function call will not execute until it returns. + """ + loop = self.loop + + try: + loop.add_signal_handler(signal.SIGINT, lambda: loop.stop()) + loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop()) + except NotImplementedError: + pass + + async def runner(): + try: + await self.start(*args, **kwargs) + finally: + await self.close() + + def stop_loop_on_completion(f): + loop.stop() + + future = asyncio.ensure_future(runner(), loop=loop) + future.add_done_callback(stop_loop_on_completion) + try: + loop.run_forever() + except KeyboardInterrupt: + log.info('Received signal to terminate bot and event loop.') + finally: + future.remove_done_callback(stop_loop_on_completion) + log.info('Cleaning up tasks.') + _cleanup_loop(loop) + + if not future.cancelled(): + return future.result() + + # properties + + def is_closed(self): + """Indicates if the websocket connection is closed.""" + return self._closed + + @property + def activity(self): + """Optional[Union[:class:`.Activity`, :class:`.Game`, :class:`.Streaming`]]: The activity being used upon + logging in. + """ + return create_activity(self._connection._activity) + + @activity.setter + def activity(self, value): + if value is None: + self._connection._activity = None + elif isinstance(value, _ActivityTag): + self._connection._activity = value.to_dict() + else: + raise TypeError('activity must be one of Game, Streaming, or Activity.') + + # helpers/getters + + @property + def users(self): + """List[:class:`~discord.User`]: Returns a list of all the users the bot can see.""" + return list(self._connection._users.values()) + + def get_channel(self, id): + """Optional[Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]]: Returns a channel with the + given ID. + + If not found, returns ``None``. + """ + return self._connection.get_channel(id) + + def get_guild(self, id): + """Optional[:class:`.Guild`]: Returns a guild with the given ID. + + If not found, returns ``None``. 
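get_channel() and get_guild() here only consult the client's internal cache and return None on a miss; they never make an API request. A small sketch of the usual cache-then-fetch pattern, pairing get_guild() with the fetch_guild() coroutine documented further below (the helper name is illustrative):

async def resolve_guild(client, guild_id):
    # Cache lookup first: no HTTP request, None simply means "not cached".
    guild = client.get_guild(guild_id)
    if guild is None:
        # Falls back to the API call; may raise Forbidden or HTTPException.
        guild = await client.fetch_guild(guild_id)
    return guild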
+ """ + return self._connection._get_guild(id) + + def get_user(self, id): + """Optional[:class:`~discord.User`]: Returns a user with the given ID. + + If not found, returns ``None``. + """ + return self._connection.get_user(id) + + def get_emoji(self, id): + """Optional[:class:`.Emoji`]: Returns an emoji with the given ID. + + If not found, returns ``None``. + """ + return self._connection.get_emoji(id) + + def get_all_channels(self): + """A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'. + + This is equivalent to: :: + + for guild in client.guilds: + for channel in guild.channels: + yield channel + + .. note:: + + Just because you receive a :class:`.abc.GuildChannel` does not mean that + you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should + be used for that. + """ + + for guild in self.guilds: + for channel in guild.channels: + yield channel + + def get_all_members(self): + """Returns a generator with every :class:`.Member` the client can see. + + This is equivalent to: :: + + for guild in client.guilds: + for member in guild.members: + yield member + """ + for guild in self.guilds: + for member in guild.members: + yield member + + # listeners/waiters + + async def wait_until_ready(self): + """|coro| + + Waits until the client's internal cache is all ready. + """ + await self._ready.wait() + + def wait_for(self, event, *, check=None, timeout=None): + """|coro| + + Waits for a WebSocket event to be dispatched. + + This could be used to wait for a user to reply to a message, + or to react to a message, or to edit a message in a self-contained + way. + + The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default, + it does not timeout. Note that this does propagate the + :exc:`asyncio.TimeoutError` for you in case of timeout and is provided for + ease of use. + + In case the event returns multiple arguments, a :class:`tuple` containing those + arguments is returned instead. Please check the + :ref:`documentation ` for a list of events and their + parameters. + + This function returns the **first event that meets the requirements**. + + Examples + --------- + + Waiting for a user reply: :: + + @client.event + async def on_message(message): + if message.content.startswith('$greet'): + channel = message.channel + await channel.send('Say hello!') + + def check(m): + return m.content == 'hello' and m.channel == channel + + msg = await client.wait_for('message', check=check) + await channel.send('Hello {.author}!'.format(msg)) + + Waiting for a thumbs up reaction from the message author: :: + + @client.event + async def on_message(message): + if message.content.startswith('$thumb'): + channel = message.channel + await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate') + + def check(reaction, user): + return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}' + + try: + reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check) + except asyncio.TimeoutError: + await channel.send('\N{THUMBS DOWN SIGN}') + else: + await channel.send('\N{THUMBS UP SIGN}') + + + Parameters + ------------ + event: :class:`str` + The event name, similar to the :ref:`event reference `, + but without the ``on_`` prefix, to wait for. + check: Optional[Callable[..., :class:`bool`]] + A predicate to check what to wait for. The arguments must meet the + parameters of the event being waited for. 
+ timeout: Optional[:class:`float`] + The number of seconds to wait before timing out and raising + :exc:`asyncio.TimeoutError`. + + Raises + ------- + asyncio.TimeoutError + If a timeout is provided and it was reached. + + Returns + -------- + Any + Returns no arguments, a single argument, or a :class:`tuple` of multiple + arguments that mirrors the parameters passed in the + :ref:`event reference `. + """ + + future = self.loop.create_future() + if check is None: + def _check(*args): + return True + check = _check + + ev = event.lower() + try: + listeners = self._listeners[ev] + except KeyError: + listeners = [] + self._listeners[ev] = listeners + + listeners.append((future, check)) + return asyncio.wait_for(future, timeout, loop=self.loop) + + # event registration + + def event(self, coro): + """A decorator that registers an event to listen to. + + You can find more info about the events on the :ref:`documentation below `. + + The events must be a :ref:`coroutine `, if not, :exc:`TypeError` is raised. + + Example + --------- + + .. code-block:: python3 + + @client.event + async def on_ready(): + print('Ready!') + + Raises + -------- + TypeError + The coroutine passed is not actually a coroutine. + """ + + if not asyncio.iscoroutinefunction(coro): + raise TypeError('event registered must be a coroutine function') + + setattr(self, coro.__name__, coro) + log.debug('%s has successfully been registered as an event', coro.__name__) + return coro + + async def change_presence(self, *, activity=None, status=None, afk=False): + """|coro| + + Changes the client's presence. + + The activity parameter is a :class:`.Activity` object (not a string) that represents + the activity being done currently. This could also be the slimmed down versions, + :class:`.Game` and :class:`.Streaming`. + + Example + --------- + + .. code-block:: python3 + + game = discord.Game("with the API") + await client.change_presence(status=discord.Status.idle, activity=game) + + Parameters + ---------- + activity: Optional[Union[:class:`.Game`, :class:`.Streaming`, :class:`.Activity`]] + The activity being done. ``None`` if no currently active activity is done. + status: Optional[:class:`.Status`] + Indicates what status to change to. If ``None``, then + :attr:`.Status.online` is used. + afk: Optional[:class:`bool`] + Indicates if you are going AFK. This allows the discord + client to know how to handle push notifications better + for you in case you are actually idle and not lying. + + Raises + ------ + :exc:`.InvalidArgument` + If the ``activity`` parameter is not the proper type. + """ + + if status is None: + status = 'online' + status_enum = Status.online + elif status is Status.offline: + status = 'invisible' + status_enum = Status.offline + else: + status_enum = status + status = str(status) + + await self.ws.change_presence(activity=activity, status=status, afk=afk) + + for guild in self._connection.guilds: + me = guild.me + if me is None: + continue + + me.activities = (activity,) + me.status = status_enum + + # Guild stuff + + def fetch_guilds(self, *, limit=100, before=None, after=None): + """|coro| + + Retrieves an :class:`.AsyncIterator` that enables receiving your guilds. + + .. note:: + + Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`, + :attr:`.Guild.id`, and :attr:`.Guild.name` per :class:`.Guild`. + + .. note:: + + This method is an API call. For general usage, consider :attr:`guilds` instead. 
+ + Examples + --------- + + Usage :: + + async for guild in client.fetch_guilds(limit=150): + print(guild.name) + + Flattening into a list :: + + guilds = await client.fetch_guilds(limit=150).flatten() + # guilds is now a list of Guild... + + All parameters are optional. + + Parameters + ----------- + limit: Optional[:class:`int`] + The number of guilds to retrieve. + If ``None``, it retrieves every guild you have access to. Note, however, + that this would make it a slow operation. + Defaults to 100. + before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`] + Retrieves guilds before this date or object. + If a date is provided it must be a timezone-naive datetime representing UTC time. + after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`] + Retrieve guilds after this date or object. + If a date is provided it must be a timezone-naive datetime representing UTC time. + + Raises + ------ + :exc:`.HTTPException` + Getting the guilds failed. + + Yields + -------- + :class:`.Guild` + The guild with the guild data parsed. + """ + return GuildIterator(self, limit=limit, before=before, after=after) + + async def fetch_guild(self, guild_id): + """|coro| + + Retrieves a :class:`.Guild` from an ID. + + .. note:: + + Using this, you will **not** receive :attr:`.Guild.channels`, :class:`.Guild.members`, + :attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`. + + .. note:: + + This method is an API call. For general usage, consider :meth:`get_guild` instead. + + Parameters + ----------- + guild_id: :class:`int` + The guild's ID to fetch from. + + Raises + ------ + :exc:`.Forbidden` + You do not have access to the guild. + :exc:`.HTTPException` + Getting the guild failed. + + Returns + -------- + :class:`.Guild` + The guild from the ID. + """ + data = await self.http.get_guild(guild_id) + return Guild(data=data, state=self._connection) + + async def create_guild(self, name, region=None, icon=None): + """|coro| + + Creates a :class:`.Guild`. + + Bot accounts in more than 10 guilds are not allowed to create guilds. + + Parameters + ---------- + name: :class:`str` + The name of the guild. + region: :class:`.VoiceRegion` + The region for the voice communication server. + Defaults to :attr:`.VoiceRegion.us_west`. + icon: :class:`bytes` + The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit` + for more details on what is expected. + + Raises + ------ + :exc:`.HTTPException` + Guild creation failed. + :exc:`.InvalidArgument` + Invalid icon image format given. Must be PNG or JPG. + + Returns + ------- + :class:`.Guild` + The guild created. This is not the same guild that is + added to cache. + """ + if icon is not None: + icon = utils._bytes_to_base64_data(icon) + + if region is None: + region = VoiceRegion.us_west.value + else: + region = region.value + + data = await self.http.create_guild(name, region, icon) + return Guild(data=data, state=self._connection) + + # Invite management + + async def fetch_invite(self, url, *, with_counts=True): + """|coro| + + Gets an :class:`.Invite` from a discord.gg URL or ID. + + .. note:: + + If the invite is for a guild you have not joined, the guild and channel + attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and + :class:`PartialInviteChannel` respectively. + + Parameters + ----------- + url: :class:`str` + The Discord invite ID or URL (must be a discord.gg URL). + with_counts: :class:`bool` + Whether to include count information in the invite. 
This fills the + :attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count` + fields. + + Raises + ------- + :exc:`.NotFound` + The invite has expired or is invalid. + :exc:`.HTTPException` + Getting the invite failed. + + Returns + -------- + :class:`.Invite` + The invite from the URL/ID. + """ + + invite_id = utils.resolve_invite(url) + data = await self.http.get_invite(invite_id, with_counts=with_counts) + return Invite.from_incomplete(state=self._connection, data=data) + + async def delete_invite(self, invite): + """|coro| + + Revokes an :class:`.Invite`, URL, or ID to an invite. + + You must have the :attr:`~.Permissions.manage_channels` permission in + the associated guild to do this. + + Parameters + ---------- + invite: Union[:class:`.Invite`, :class:`str`] + The invite to revoke. + + Raises + ------- + :exc:`.Forbidden` + You do not have permissions to revoke invites. + :exc:`.NotFound` + The invite is invalid or expired. + :exc:`.HTTPException` + Revoking the invite failed. + """ + + invite_id = utils.resolve_invite(invite) + await self.http.delete_invite(invite_id) + + # Miscellaneous stuff + + async def fetch_widget(self, guild_id): + """|coro| + + Gets a :class:`.Widget` from a guild ID. + + .. note:: + + The guild must have the widget enabled to get this information. + + Parameters + ----------- + guild_id: :class:`int` + The ID of the guild. + + Raises + ------- + :exc:`.Forbidden` + The widget for this guild is disabled. + :exc:`.HTTPException` + Retrieving the widget failed. + + Returns + -------- + :class:`.Widget` + The guild's widget. + """ + data = await self.http.get_widget(guild_id) + + return Widget(state=self._connection, data=data) + + async def application_info(self): + """|coro| + + Retrieves the bot's application information. + + Raises + ------- + :exc:`.HTTPException` + Retrieving the information failed somehow. + + Returns + -------- + :class:`.AppInfo` + The bot's application information. + """ + data = await self.http.application_info() + if 'rpc_origins' not in data: + data['rpc_origins'] = None + return AppInfo(self._connection, data) + + async def fetch_user(self, user_id): + """|coro| + + Retrieves a :class:`~discord.User` based on their ID. This can only + be used by bot accounts. You do not have to share any guilds + with the user to get this information, however many operations + do require that you do. + + .. note:: + + This method is an API call. For general usage, consider :meth:`get_user` instead. + + Parameters + ----------- + user_id: :class:`int` + The user's ID to fetch from. + + Raises + ------- + :exc:`.NotFound` + A user with this ID does not exist. + :exc:`.HTTPException` + Fetching the user failed. + + Returns + -------- + :class:`~discord.User` + The user you requested. + """ + data = await self.http.get_user(user_id) + return User(state=self._connection, data=data) + + async def fetch_user_profile(self, user_id): + """|coro| + + Gets an arbitrary user's profile. This can only be used by non-bot accounts. + + Parameters + ------------ + user_id: :class:`int` + The ID of the user to fetch their profile for. + + Raises + ------- + :exc:`.Forbidden` + Not allowed to fetch profiles. + :exc:`.HTTPException` + Fetching the profile failed. + + Returns + -------- + :class:`.Profile` + The profile of the user. 
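# --- Editorial aside (not part of the diff): a minimal sketch contrasting the
# cache accessors (get_*) with the HTTP calls (fetch_*) documented above.
# `client`, the ID and the token are hypothetical placeholders.
import discord

client = discord.Client()

@client.event
async def on_ready():
    user = client.get_user(123456789012345678)   # cache only; None on a miss
    if user is None:
        user = await client.fetch_user(123456789012345678)  # always an API call
    print('Resolved user:', user)

client.run('token')  # placeholder token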
+ """ + + state = self._connection + data = await self.http.get_user_profile(user_id) + + def transform(d): + return state._get_guild(int(d['id'])) + + since = data.get('premium_since') + mutual_guilds = list(filter(None, map(transform, data.get('mutual_guilds', [])))) + user = data['user'] + return Profile(flags=user.get('flags', 0), + premium_since=utils.parse_time(since), + mutual_guilds=mutual_guilds, + user=User(data=user, state=state), + connected_accounts=data['connected_accounts']) + + async def fetch_channel(self, channel_id): + """|coro| + + Retrieves a :class:`.abc.GuildChannel` or :class:`.abc.PrivateChannel` with the specified ID. + + .. note:: + + This method is an API call. For general usage, consider :meth:`get_channel` instead. + + .. versionadded:: 1.2.0 + + Raises + ------- + :exc:`.InvalidData` + An unknown channel type was received from Discord. + :exc:`.HTTPException` + Retrieving the channel failed. + :exc:`.NotFound` + Invalid Channel ID. + :exc:`.Forbidden` + You do not have permission to fetch this channel. + + Returns + -------- + Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`] + The channel from the ID. + """ + data = await self.http.get_channel(channel_id) + + factory, ch_type = _channel_factory(data['type']) + if factory is None: + raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data)) + + if ch_type in (ChannelType.group, ChannelType.private): + channel = factory(me=self.user, data=data, state=self._connection) + else: + guild_id = int(data['guild_id']) + guild = self.get_guild(guild_id) or Object(id=guild_id) + channel = factory(guild=guild, state=self._connection, data=data) + + return channel + + async def fetch_webhook(self, webhook_id): + """|coro| + + Retrieves a :class:`.Webhook` with the specified ID. + + Raises + -------- + :exc:`.HTTPException` + Retrieving the webhook failed. + :exc:`.NotFound` + Invalid webhook ID. + :exc:`.Forbidden` + You do not have permission to fetch this webhook. + + Returns + --------- + :class:`.Webhook` + The webhook you requested. + """ + data = await self.http.get_webhook(webhook_id) + return Webhook.from_state(data, state=self._connection) diff --git a/venv/lib/python3.7/site-packages/discord/colour.py b/venv/lib/python3.7/site-packages/discord/colour.py new file mode 100644 index 0000000..1dc9662 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/colour.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import colorsys + +class Colour: + """Represents a Discord role colour. This class is similar + to an (red, green, blue) :class:`tuple`. + + There is an alias for this called Color. + + .. container:: operations + + .. describe:: x == y + + Checks if two colours are equal. + + .. describe:: x != y + + Checks if two colours are not equal. + + .. describe:: hash(x) + + Return the colour's hash. + + .. describe:: str(x) + + Returns the hex format for the colour. + + Attributes + ------------ + value: :class:`int` + The raw integer colour value. + """ + + __slots__ = ('value',) + + def __init__(self, value): + if not isinstance(value, int): + raise TypeError('Expected int parameter, received %s instead.' % value.__class__.__name__) + + self.value = value + + def _get_byte(self, byte): + return (self.value >> (8 * byte)) & 0xff + + def __eq__(self, other): + return isinstance(other, Colour) and self.value == other.value + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + return '#{:0>6x}'.format(self.value) + + def __repr__(self): + return '' % self.value + + def __hash__(self): + return hash(self.value) + + @property + def r(self): + """:class:`int`: Returns the red component of the colour.""" + return self._get_byte(2) + + @property + def g(self): + """:class:`int`: Returns the green component of the colour.""" + return self._get_byte(1) + + @property + def b(self): + """:class:`int`: Returns the blue component of the colour.""" + return self._get_byte(0) + + def to_rgb(self): + """Tuple[:class:`int`, :class:`int`, :class:`int`]: Returns an (r, g, b) tuple representing the colour.""" + return (self.r, self.g, self.b) + + @classmethod + def from_rgb(cls, r, g, b): + """Constructs a :class:`Colour` from an RGB tuple.""" + return cls((r << 16) + (g << 8) + b) + + @classmethod + def from_hsv(cls, h, s, v): + """Constructs a :class:`Colour` from an HSV tuple.""" + rgb = colorsys.hsv_to_rgb(h, s, v) + return cls.from_rgb(*(int(x * 255) for x in rgb)) + + @classmethod + def default(cls): + """A factory method that returns a :class:`Colour` with a value of 0.""" + return cls(0) + + @classmethod + def teal(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x1abc9c``.""" + return cls(0x1abc9c) + + @classmethod + def dark_teal(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x11806a``.""" + return cls(0x11806a) + + @classmethod + def green(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x2ecc71``.""" + return cls(0x2ecc71) + + @classmethod + def dark_green(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x1f8b4c``.""" + return cls(0x1f8b4c) + + @classmethod + def blue(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x3498db``.""" + return cls(0x3498db) + + @classmethod + def dark_blue(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x206694``.""" + return cls(0x206694) + + @classmethod + def purple(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x9b59b6``.""" + return cls(0x9b59b6) + + @classmethod + def dark_purple(cls): + """A factory method that returns a :class:`Colour` with a value of 
``0x71368a``.""" + return cls(0x71368a) + + @classmethod + def magenta(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe91e63``.""" + return cls(0xe91e63) + + @classmethod + def dark_magenta(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xad1457``.""" + return cls(0xad1457) + + @classmethod + def gold(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xf1c40f``.""" + return cls(0xf1c40f) + + @classmethod + def dark_gold(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xc27c0e``.""" + return cls(0xc27c0e) + + @classmethod + def orange(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe67e22``.""" + return cls(0xe67e22) + + @classmethod + def dark_orange(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xa84300``.""" + return cls(0xa84300) + + @classmethod + def red(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe74c3c``.""" + return cls(0xe74c3c) + + @classmethod + def dark_red(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x992d22``.""" + return cls(0x992d22) + + @classmethod + def lighter_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x95a5a6``.""" + return cls(0x95a5a6) + + @classmethod + def dark_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x607d8b``.""" + return cls(0x607d8b) + + @classmethod + def light_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x979c9f``.""" + return cls(0x979c9f) + + @classmethod + def darker_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x546e7a``.""" + return cls(0x546e7a) + + @classmethod + def blurple(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x7289da``.""" + return cls(0x7289da) + + @classmethod + def greyple(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x99aab5``.""" + return cls(0x99aab5) + +Color = Colour diff --git a/venv/lib/python3.7/site-packages/discord/context_managers.py b/venv/lib/python3.7/site-packages/discord/context_managers.py new file mode 100644 index 0000000..e7ac501 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/context_managers.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
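# --- Editorial aside (not part of the diff): a small sketch of the Colour
# helpers defined above; the values used here are arbitrary.
import discord

c1 = discord.Colour.blurple()               # factory method, value 0x7289da
c2 = discord.Colour.from_rgb(46, 204, 113)  # same value as Colour.green()
c3 = discord.Color(0x1abc9c)                # `Color` is an alias of `Colour`

print(str(c1))                       # '#7289da'
print(c2.to_rgb())                   # (46, 204, 113)
print(c3 == discord.Colour.teal())   # True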
+""" + +import asyncio + +def _typing_done_callback(fut): + # just retrieve any exception and call it a day + try: + fut.exception() + except (asyncio.CancelledError, Exception): + pass + +class Typing: + def __init__(self, messageable): + self.loop = messageable._state.loop + self.messageable = messageable + + async def do_typing(self): + try: + channel = self._channel + except AttributeError: + channel = await self.messageable._get_channel() + + typing = channel._state.http.send_typing + + while True: + await typing(channel.id) + await asyncio.sleep(5) + + def __enter__(self): + self.task = asyncio.ensure_future(self.do_typing(), loop=self.loop) + self.task.add_done_callback(_typing_done_callback) + return self + + def __exit__(self, exc_type, exc, tb): + self.task.cancel() + + async def __aenter__(self): + self._channel = channel = await self.messageable._get_channel() + await channel._state.http.send_typing(channel.id) + return self.__enter__() + + async def __aexit__(self, exc_type, exc, tb): + self.task.cancel() diff --git a/venv/lib/python3.7/site-packages/discord/embeds.py b/venv/lib/python3.7/site-packages/discord/embeds.py new file mode 100644 index 0000000..e8b8398 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/embeds.py @@ -0,0 +1,565 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import datetime + +from . import utils +from .colour import Colour + +class _EmptyEmbed: + def __bool__(self): + return False + + def __repr__(self): + return 'Embed.Empty' + + def __len__(self): + return 0 + +EmptyEmbed = _EmptyEmbed() + +class EmbedProxy: + def __init__(self, layer): + self.__dict__.update(layer) + + def __len__(self): + return len(self.__dict__) + + def __repr__(self): + return 'EmbedProxy(%s)' % ', '.join(('%s=%r' % (k, v) for k, v in self.__dict__.items() if not k.startswith('_'))) + + def __getattr__(self, attr): + return EmptyEmbed + +class Embed: + """Represents a Discord embed. + + .. container:: operations + + .. describe:: len(x) + + Returns the total size of the embed. + Useful for checking if it's within the 6000 character limit. + + Certain properties return an ``EmbedProxy``, a type + that acts similar to a regular :class:`dict` except using dotted access, + e.g. ``embed.author.icon_url``. If the attribute + is invalid or empty, then a special sentinel value is returned, + :attr:`Embed.Empty`. 
+ + For ease of use, all parameters that expect a :class:`str` are implicitly + casted to :class:`str` for you. + + Attributes + ----------- + title: :class:`str` + The title of the embed. + This can be set during initialisation. + type: :class:`str` + The type of embed. Usually "rich". + This can be set during initialisation. + description: :class:`str` + The description of the embed. + This can be set during initialisation. + url: :class:`str` + The URL of the embed. + This can be set during initialisation. + timestamp: :class:`datetime.datetime` + The timestamp of the embed content. This could be a naive or aware datetime. + colour: Union[:class:`Colour`, :class:`int`] + The colour code of the embed. Aliased to ``color`` as well. + This can be set during initialisation. + Empty + A special sentinel value used by ``EmbedProxy`` and this class + to denote that the value or attribute is empty. + """ + + __slots__ = ('title', 'url', 'type', '_timestamp', '_colour', '_footer', + '_image', '_thumbnail', '_video', '_provider', '_author', + '_fields', 'description') + + Empty = EmptyEmbed + + def __init__(self, **kwargs): + # swap the colour/color aliases + try: + colour = kwargs['colour'] + except KeyError: + colour = kwargs.get('color', EmptyEmbed) + + self.colour = colour + self.title = kwargs.get('title', EmptyEmbed) + self.type = kwargs.get('type', 'rich') + self.url = kwargs.get('url', EmptyEmbed) + self.description = kwargs.get('description', EmptyEmbed) + + try: + timestamp = kwargs['timestamp'] + except KeyError: + pass + else: + self.timestamp = timestamp + + @classmethod + def from_dict(cls, data): + """Converts a :class:`dict` to a :class:`Embed` provided it is in the + format that Discord expects it to be in. + + You can find out about this format in the `official Discord documentation`__. + + .. _DiscordDocs: https://discordapp.com/developers/docs/resources/channel#embed-object + + __ DiscordDocs_ + + Parameters + ----------- + data: :class:`dict` + The dictionary to convert into an embed. 
+ """ + # we are bypassing __init__ here since it doesn't apply here + self = cls.__new__(cls) + + # fill in the basic fields + + self.title = data.get('title', EmptyEmbed) + self.type = data.get('type', EmptyEmbed) + self.description = data.get('description', EmptyEmbed) + self.url = data.get('url', EmptyEmbed) + + # try to fill in the more rich fields + + try: + self._colour = Colour(value=data['color']) + except KeyError: + pass + + try: + self._timestamp = utils.parse_time(data['timestamp']) + except KeyError: + pass + + for attr in ('thumbnail', 'video', 'provider', 'author', 'fields', 'image', 'footer'): + try: + value = data[attr] + except KeyError: + continue + else: + setattr(self, '_' + attr, value) + + return self + + def copy(self): + """Returns a shallow copy of the embed.""" + return Embed.from_dict(self.to_dict()) + + def __len__(self): + total = len(self.title) + len(self.description) + for field in getattr(self, '_fields', []): + total += len(field['name']) + len(field['value']) + + try: + footer = self._footer + except AttributeError: + pass + else: + total += len(footer['text']) + + try: + author = self._author + except AttributeError: + pass + else: + total += len(author['name']) + + return total + + @property + def colour(self): + return getattr(self, '_colour', EmptyEmbed) + + @colour.setter + def colour(self, value): + if isinstance(value, (Colour, _EmptyEmbed)): + self._colour = value + elif isinstance(value, int): + self._colour = Colour(value=value) + else: + raise TypeError('Expected discord.Colour, int, or Embed.Empty but received %s instead.' % value.__class__.__name__) + + color = colour + + @property + def timestamp(self): + return getattr(self, '_timestamp', EmptyEmbed) + + @timestamp.setter + def timestamp(self, value): + if isinstance(value, (datetime.datetime, _EmptyEmbed)): + self._timestamp = value + else: + raise TypeError("Expected datetime.datetime or Embed.Empty received %s instead" % value.__class__.__name__) + + @property + def footer(self): + """Returns an ``EmbedProxy`` denoting the footer contents. + + See :meth:`set_footer` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_footer', {})) + + def set_footer(self, *, text=EmptyEmbed, icon_url=EmptyEmbed): + """Sets the footer for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + text: :class:`str` + The footer text. + icon_url: :class:`str` + The URL of the footer icon. Only HTTP(S) is supported. + """ + + self._footer = {} + if text is not EmptyEmbed: + self._footer['text'] = str(text) + + if icon_url is not EmptyEmbed: + self._footer['icon_url'] = str(icon_url) + + return self + + @property + def image(self): + """Returns an ``EmbedProxy`` denoting the image contents. + + Possible attributes you can access are: + + - ``url`` + - ``proxy_url`` + - ``width`` + - ``height`` + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_image', {})) + + def set_image(self, *, url): + """Sets the image for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + url: :class:`str` + The source URL for the image. Only HTTP(S) is supported. + """ + + self._image = { + 'url': str(url) + } + + return self + + @property + def thumbnail(self): + """Returns an ``EmbedProxy`` denoting the thumbnail contents. 
+ + Possible attributes you can access are: + + - ``url`` + - ``proxy_url`` + - ``width`` + - ``height`` + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_thumbnail', {})) + + def set_thumbnail(self, *, url): + """Sets the thumbnail for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + url: :class:`str` + The source URL for the thumbnail. Only HTTP(S) is supported. + """ + + self._thumbnail = { + 'url': str(url) + } + + return self + + @property + def video(self): + """Returns an ``EmbedProxy`` denoting the video contents. + + Possible attributes include: + + - ``url`` for the video URL. + - ``height`` for the video height. + - ``width`` for the video width. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_video', {})) + + @property + def provider(self): + """Returns an ``EmbedProxy`` denoting the provider contents. + + The only attributes that might be accessed are ``name`` and ``url``. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_provider', {})) + + @property + def author(self): + """Returns an ``EmbedProxy`` denoting the author contents. + + See :meth:`set_author` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_author', {})) + + def set_author(self, *, name, url=EmptyEmbed, icon_url=EmptyEmbed): + """Sets the author for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + name: :class:`str` + The name of the author. + url: :class:`str` + The URL for the author. + icon_url: :class:`str` + The URL of the author icon. Only HTTP(S) is supported. + """ + + self._author = { + 'name': str(name) + } + + if url is not EmptyEmbed: + self._author['url'] = str(url) + + if icon_url is not EmptyEmbed: + self._author['icon_url'] = str(icon_url) + + return self + + @property + def fields(self): + """Returns a :class:`list` of ``EmbedProxy`` denoting the field contents. + + See :meth:`add_field` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return [EmbedProxy(d) for d in getattr(self, '_fields', [])] + + def add_field(self, *, name, value, inline=True): + """Adds a field to the embed object. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + name: :class:`str` + The name of the field. + value: :class:`str` + The value of the field. + inline: :class:`bool` + Whether the field should be displayed inline. + """ + + field = { + 'inline': inline, + 'name': str(name), + 'value': str(value) + } + + try: + self._fields.append(field) + except AttributeError: + self._fields = [field] + + return self + + def insert_field_at(self, index, *, name, value, inline=True): + """Inserts a field before a specified index to the embed. + + This function returns the class instance to allow for fluent-style + chaining. + + .. versionadded:: 1.2.0 + + Parameters + ----------- + index: :class:`int` + The index of where to insert the field. + name: :class:`str` + The name of the field. + value: :class:`str` + The value of the field. + inline: :class:`bool` + Whether the field should be displayed inline. 
+ """ + + field = { + 'inline': inline, + 'name': str(name), + 'value': str(value) + } + + try: + self._fields.insert(index, field) + except AttributeError: + self._fields = [field] + + return self + + def clear_fields(self): + """Removes all fields from this embed.""" + try: + self._fields.clear() + except AttributeError: + self._fields = [] + + def remove_field(self, index): + """Removes a field at a specified index. + + If the index is invalid or out of bounds then the error is + silently swallowed. + + .. note:: + + When deleting a field by index, the index of the other fields + shift to fill the gap just like a regular list. + + Parameters + ----------- + index: :class:`int` + The index of the field to remove. + """ + try: + del self._fields[index] + except (AttributeError, IndexError): + pass + + def set_field_at(self, index, *, name, value, inline=True): + """Modifies a field to the embed object. + + The index must point to a valid pre-existing field. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + index: :class:`int` + The index of the field to modify. + name: :class:`str` + The name of the field. + value: :class:`str` + The value of the field. + inline: :class:`bool` + Whether the field should be displayed inline. + + Raises + ------- + IndexError + An invalid index was provided. + """ + + try: + field = self._fields[index] + except (TypeError, IndexError, AttributeError): + raise IndexError('field index out of range') + + field['name'] = str(name) + field['value'] = str(value) + field['inline'] = inline + return self + + def to_dict(self): + """Converts this embed object into a dict.""" + + # add in the raw data into the dict + result = { + key[1:]: getattr(self, key) + for key in self.__slots__ + if key[0] == '_' and hasattr(self, key) + } + + # deal with basic convenience wrappers + + try: + colour = result.pop('colour') + except KeyError: + pass + else: + if colour: + result['color'] = colour.value + + try: + timestamp = result.pop('timestamp') + except KeyError: + pass + else: + if timestamp: + if timestamp.tzinfo: + result['timestamp'] = timestamp.astimezone(tz=datetime.timezone.utc).isoformat() + else: + result['timestamp'] = timestamp.replace(tzinfo=datetime.timezone.utc).isoformat() + + # add in the non raw attribute ones + if self.type: + result['type'] = self.type + + if self.description: + result['description'] = self.description + + if self.url: + result['url'] = self.url + + if self.title: + result['title'] = self.title + + return result diff --git a/venv/lib/python3.7/site-packages/discord/emoji.py b/venv/lib/python3.7/site-packages/discord/emoji.py new file mode 100644 index 0000000..5e864b2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/emoji.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .asset import Asset +from . import utils +from .user import User + +class PartialEmoji: + """Represents a "partial" emoji. + + This model will be given in two scenarios: + + - "Raw" data events such as :func:`on_raw_reaction_add` + - Custom emoji that the bot cannot see from e.g. :attr:`Message.reactions` + + .. container:: operations + + .. describe:: x == y + + Checks if two emoji are the same. + + .. describe:: x != y + + Checks if two emoji are not the same. + + .. describe:: hash(x) + + Return the emoji's hash. + + .. describe:: str(x) + + Returns the emoji rendered for discord. + + Attributes + ----------- + name: :class:`str` + The custom emoji name, if applicable, or the unicode codepoint + of the non-custom emoji. + animated: :class:`bool` + Whether the emoji is animated or not. + id: Optional[:class:`int`] + The ID of the custom emoji, if applicable. + """ + + __slots__ = ('animated', 'name', 'id', '_state') + + def __init__(self, *, animated, name, id=None): + self.animated = animated + self.name = name + self.id = id + self._state = None + + @classmethod + def with_state(cls, state, *, animated, name, id=None): + self = cls(animated=animated, name=name, id=id) + self._state = state + return self + + def __str__(self): + if self.id is None: + return self.name + if self.animated: + return '' % (self.name, self.id) + return '<:%s:%s>' % (self.name, self.id) + + def __repr__(self): + return '<{0.__class__.__name__} animated={0.animated} name={0.name!r} id={0.id}>'.format(self) + + def __eq__(self, other): + if self.is_unicode_emoji(): + return isinstance(other, PartialEmoji) and self.name == other.name + + if isinstance(other, (PartialEmoji, Emoji)): + return self.id == other.id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.id, self.name)) + + def is_custom_emoji(self): + """Checks if this is a custom non-Unicode emoji.""" + return self.id is not None + + def is_unicode_emoji(self): + """Checks if this is a Unicode emoji.""" + return self.id is None + + def _as_reaction(self): + if self.id is None: + return self.name + return '%s:%s' % (self.name, self.id) + + @property + def url(self): + """:class:`Asset`:Returns an asset of the emoji, if it is custom.""" + if self.is_unicode_emoji(): + return Asset(self._state) + + _format = 'gif' if self.animated else 'png' + url = "https://cdn.discordapp.com/emojis/{0.id}.{1}".format(self, _format) + return Asset(self._state, url) + +class Emoji: + """Represents a custom emoji. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + .. container:: operations + + .. describe:: x == y + + Checks if two emoji are the same. + + .. describe:: x != y + + Checks if two emoji are not the same. + + .. describe:: hash(x) + + Return the emoji's hash. + + .. describe:: iter(x) + + Returns an iterator of ``(field, value)`` pairs. This allows this class + to be used as an iterable in list/dict/etc constructions. + + .. 
describe:: str(x) + + Returns the emoji rendered for discord. + + Attributes + ----------- + name: :class:`str` + The name of the emoji. + id: :class:`int` + The emoji's ID. + require_colons: :class:`bool` + If colons are required to use this emoji in the client (:PJSalt: vs PJSalt). + animated: :class:`bool` + Whether an emoji is animated or not. + managed: :class:`bool` + If this emoji is managed by a Twitch integration. + guild_id: :class:`int` + The guild ID the emoji belongs to. + available: :class:`bool` + Whether the emoji is available for use. + user: Optional[:class:`User`] + The user that created the emoji. This can only be retrieved using :meth:`Guild.fetch_emoji` and + having the :attr:`~Permissions.manage_emojis` permission. + """ + __slots__ = ('require_colons', 'animated', 'managed', 'id', 'name', '_roles', 'guild_id', + '_state', 'user', 'available') + + def __init__(self, *, guild, state, data): + self.guild_id = guild.id + self._state = state + self._from_data(data) + + def _from_data(self, emoji): + self.require_colons = emoji['require_colons'] + self.managed = emoji['managed'] + self.id = int(emoji['id']) + self.name = emoji['name'] + self.animated = emoji.get('animated', False) + self.available = emoji.get('available', True) + self._roles = utils.SnowflakeList(map(int, emoji.get('roles', []))) + user = emoji.get('user') + self.user = User(state=self._state, data=user) if user else None + + def _iterator(self): + for attr in self.__slots__: + if attr[0] != '_': + value = getattr(self, attr, None) + if value is not None: + yield (attr, value) + + def __iter__(self): + return self._iterator() + + def __str__(self): + if self.animated: + return ''.format(self) + return "<:{0.name}:{0.id}>".format(self) + + def __repr__(self): + return ''.format(self) + + def __eq__(self, other): + return isinstance(other, (PartialEmoji, Emoji)) and self.id == other.id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return self.id >> 22 + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the emoji's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @property + def url(self): + """:class:`Asset`: Returns the asset of the emoji.""" + _format = 'gif' if self.animated else 'png' + url = "https://cdn.discordapp.com/emojis/{0.id}.{1}".format(self, _format) + return Asset(self._state, url) + + @property + def roles(self): + """List[:class:`Role`]: A :class:`list` of roles that is allowed to use this emoji. + + If roles is empty, the emoji is unrestricted. + """ + guild = self.guild + if guild is None: + return [] + + return [role for role in guild.roles if self._roles.has(role.id)] + + @property + def guild(self): + """:class:`Guild`: The guild this emoji belongs to.""" + return self._state._get_guild(self.guild_id) + + async def delete(self, *, reason=None): + """|coro| + + Deletes the custom emoji. + + You must have :attr:`~Permissions.manage_emojis` permission to + do this. + + Parameters + ----------- + reason: Optional[:class:`str`] + The reason for deleting this emoji. Shows up on the audit log. + + Raises + ------- + Forbidden + You are not allowed to delete emojis. + HTTPException + An error occurred deleting the emoji. + """ + + await self._state.http.delete_custom_emoji(self.guild.id, self.id, reason=reason) + + async def edit(self, *, name, roles=None, reason=None): + r"""|coro| + + Edits the custom emoji. + + You must have :attr:`~Permissions.manage_emojis` permission to + do this. 
+ + Parameters + ----------- + name: :class:`str` + The new emoji name. + roles: Optional[list[:class:`Role`]] + A :class:`list` of :class:`Role`\s that can use this emoji. Leave empty to make it available to everyone. + reason: Optional[:class:`str`] + The reason for editing this emoji. Shows up on the audit log. + + Raises + ------- + Forbidden + You are not allowed to edit emojis. + HTTPException + An error occurred editing the emoji. + """ + + if roles: + roles = [role.id for role in roles] + await self._state.http.edit_custom_emoji(self.guild.id, self.id, name=name, roles=roles, reason=reason) diff --git a/venv/lib/python3.7/site-packages/discord/enums.py b/venv/lib/python3.7/site-packages/discord/enums.py new file mode 100644 index 0000000..5fec3a0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/enums.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
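# --- Editorial aside (not part of the diff): how the emoji models above are
# typically rendered and inspected; the name and ID here are made up.
import discord

partial = discord.PartialEmoji(animated=False, name='python',
                               id=123456789012345678)
print(str(partial))               # custom emoji rendered for message content
print(partial.is_custom_emoji())  # True

unicode_emoji = discord.PartialEmoji(animated=False, name='\N{THUMBS UP SIGN}')
print(unicode_emoji.is_unicode_emoji())  # True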
+""" + +import types +from collections import namedtuple + +__all__ = ( + 'Enum', + 'ChannelType', + 'MessageType', + 'VoiceRegion', + 'SpeakingState', + 'VerificationLevel', + 'ContentFilter', + 'Status', + 'DefaultAvatar', + 'RelationshipType', + 'AuditLogAction', + 'AuditLogActionCategory', + 'UserFlags', + 'ActivityType', + 'HypeSquadHouse', + 'NotificationLevel', + 'PremiumType', + 'UserContentFilter', + 'FriendFlags', + 'Theme', +) + +def _create_value_cls(name): + cls = namedtuple('_EnumValue_' + name, 'name value') + cls.__repr__ = lambda self: '<%s.%s: %r>' % (name, self.name, self.value) + cls.__str__ = lambda self: '%s.%s' % (name, self.name) + return cls + +def _is_descriptor(obj): + return hasattr(obj, '__get__') or hasattr(obj, '__set__') or hasattr(obj, '__delete__') + +class EnumMeta(type): + def __new__(cls, name, bases, attrs): + value_mapping = {} + member_mapping = {} + member_names = [] + + value_cls = _create_value_cls(name) + for key, value in list(attrs.items()): + is_descriptor = _is_descriptor(value) + if key[0] == '_' and not is_descriptor: + continue + + # Special case classmethod to just pass through + if isinstance(value, classmethod): + continue + + if is_descriptor: + setattr(value_cls, key, value) + del attrs[key] + continue + + try: + new_value = value_mapping[value] + except KeyError: + new_value = value_cls(name=key, value=value) + value_mapping[value] = new_value + member_names.append(key) + + member_mapping[key] = new_value + attrs[key] = new_value + + attrs['_enum_value_map_'] = value_mapping + attrs['_enum_member_map_'] = member_mapping + attrs['_enum_member_names_'] = member_names + actual_cls = super().__new__(cls, name, bases, attrs) + value_cls._actual_enum_cls_ = actual_cls + return actual_cls + + def __iter__(cls): + return (cls._enum_member_map_[name] for name in cls._enum_member_names_) + + def __reversed__(cls): + return (cls._enum_member_map_[name] for name in reversed(cls._enum_member_names_)) + + def __len__(cls): + return len(cls._enum_member_names_) + + def __repr__(cls): + return '' % cls.__name__ + + @property + def __members__(cls): + return types.MappingProxyType(cls._enum_member_map_) + + def __call__(cls, value): + try: + return cls._enum_value_map_[value] + except (KeyError, TypeError): + raise ValueError("%r is not a valid %s" % (value, cls.__name__)) + + def __getitem__(cls, key): + return cls._enum_member_map_[key] + + def __setattr__(cls, name, value): + raise TypeError('Enums are immutable.') + + def __delattr__(cls, attr): + raise TypeError('Enums are immutable') + + def __instancecheck__(self, instance): + # isinstance(x, Y) + # -> __instancecheck__(Y, x) + try: + return instance._actual_enum_cls_ is self + except AttributeError: + return False + +class Enum(metaclass=EnumMeta): + @classmethod + def try_value(cls, value): + try: + return cls._enum_value_map_[value] + except (KeyError, TypeError): + return value + + +class ChannelType(Enum): + text = 0 + private = 1 + voice = 2 + group = 3 + category = 4 + news = 5 + store = 6 + + def __str__(self): + return self.name + +class MessageType(Enum): + default = 0 + recipient_add = 1 + recipient_remove = 2 + call = 3 + channel_name_change = 4 + channel_icon_change = 5 + pins_add = 6 + new_member = 7 + premium_guild_subscription = 8 + premium_guild_tier_1 = 9 + premium_guild_tier_2 = 10 + premium_guild_tier_3 = 11 + +class VoiceRegion(Enum): + us_west = 'us-west' + us_east = 'us-east' + us_south = 'us-south' + us_central = 'us-central' + eu_west = 'eu-west' + eu_central = 
'eu-central' + singapore = 'singapore' + london = 'london' + sydney = 'sydney' + amsterdam = 'amsterdam' + frankfurt = 'frankfurt' + brazil = 'brazil' + hongkong = 'hongkong' + russia = 'russia' + japan = 'japan' + southafrica = 'southafrica' + india = 'india' + vip_us_east = 'vip-us-east' + vip_us_west = 'vip-us-west' + vip_amsterdam = 'vip-amsterdam' + + def __str__(self): + return self.value + +class SpeakingState(Enum): + none = 0 + voice = 1 + soundshare = 2 + priority = 4 + + def __str__(self): + return self.name + + def __int__(self): + return self.value + +class VerificationLevel(Enum): + none = 0 + low = 1 + medium = 2 + high = 3 + table_flip = 3 + extreme = 4 + double_table_flip = 4 + + def __str__(self): + return self.name + +class ContentFilter(Enum): + disabled = 0 + no_role = 1 + all_members = 2 + + def __str__(self): + return self.name + +class UserContentFilter(Enum): + disabled = 0 + friends = 1 + all_messages = 2 + +class FriendFlags(Enum): + noone = 0 + mutual_guilds = 1 + mutual_friends = 2 + guild_and_friends = 3 + everyone = 4 + +class Theme(Enum): + light = 'light' + dark = 'dark' + +class Status(Enum): + online = 'online' + offline = 'offline' + idle = 'idle' + dnd = 'dnd' + do_not_disturb = 'dnd' + invisible = 'invisible' + + def __str__(self): + return self.value + +class DefaultAvatar(Enum): + blurple = 0 + grey = 1 + gray = 1 + green = 2 + orange = 3 + red = 4 + + def __str__(self): + return self.name + +class RelationshipType(Enum): + friend = 1 + blocked = 2 + incoming_request = 3 + outgoing_request = 4 + +class NotificationLevel(Enum): + all_messages = 0 + only_mentions = 1 + +class AuditLogActionCategory(Enum): + create = 1 + delete = 2 + update = 3 + +class AuditLogAction(Enum): + guild_update = 1 + channel_create = 10 + channel_update = 11 + channel_delete = 12 + overwrite_create = 13 + overwrite_update = 14 + overwrite_delete = 15 + kick = 20 + member_prune = 21 + ban = 22 + unban = 23 + member_update = 24 + member_role_update = 25 + role_create = 30 + role_update = 31 + role_delete = 32 + invite_create = 40 + invite_update = 41 + invite_delete = 42 + webhook_create = 50 + webhook_update = 51 + webhook_delete = 52 + emoji_create = 60 + emoji_update = 61 + emoji_delete = 62 + message_delete = 72 + + @property + def category(self): + lookup = { + AuditLogAction.guild_update: AuditLogActionCategory.update, + AuditLogAction.channel_create: AuditLogActionCategory.create, + AuditLogAction.channel_update: AuditLogActionCategory.update, + AuditLogAction.channel_delete: AuditLogActionCategory.delete, + AuditLogAction.overwrite_create: AuditLogActionCategory.create, + AuditLogAction.overwrite_update: AuditLogActionCategory.update, + AuditLogAction.overwrite_delete: AuditLogActionCategory.delete, + AuditLogAction.kick: None, + AuditLogAction.member_prune: None, + AuditLogAction.ban: None, + AuditLogAction.unban: None, + AuditLogAction.member_update: AuditLogActionCategory.update, + AuditLogAction.member_role_update: AuditLogActionCategory.update, + AuditLogAction.role_create: AuditLogActionCategory.create, + AuditLogAction.role_update: AuditLogActionCategory.update, + AuditLogAction.role_delete: AuditLogActionCategory.delete, + AuditLogAction.invite_create: AuditLogActionCategory.create, + AuditLogAction.invite_update: AuditLogActionCategory.update, + AuditLogAction.invite_delete: AuditLogActionCategory.delete, + AuditLogAction.webhook_create: AuditLogActionCategory.create, + AuditLogAction.webhook_update: AuditLogActionCategory.update, + 
AuditLogAction.webhook_delete: AuditLogActionCategory.delete, + AuditLogAction.emoji_create: AuditLogActionCategory.create, + AuditLogAction.emoji_update: AuditLogActionCategory.update, + AuditLogAction.emoji_delete: AuditLogActionCategory.delete, + AuditLogAction.message_delete: AuditLogActionCategory.delete, + } + return lookup[self] + + @property + def target_type(self): + v = self.value + if v == -1: + return 'all' + elif v < 10: + return 'guild' + elif v < 20: + return 'channel' + elif v < 30: + return 'user' + elif v < 40: + return 'role' + elif v < 50: + return 'invite' + elif v < 60: + return 'webhook' + elif v < 70: + return 'emoji' + elif v < 80: + return 'message' + +class UserFlags(Enum): + staff = 1 + partner = 2 + hypesquad = 4 + bug_hunter = 8 + hypesquad_bravery = 64 + hypesquad_brilliance = 128 + hypesquad_balance = 256 + early_supporter = 512 + +class ActivityType(Enum): + unknown = -1 + playing = 0 + streaming = 1 + listening = 2 + watching = 3 + + def __int__(self): + return self.value + +class HypeSquadHouse(Enum): + bravery = 1 + brilliance = 2 + balance = 3 + +class PremiumType(Enum): + nitro_classic = 1 + nitro = 2 + +def try_enum(cls, val): + """A function that tries to turn the value into enum ``cls``. + + If it fails it returns the value instead. + """ + + try: + return cls._enum_value_map_[val] + except (KeyError, TypeError, AttributeError): + return val diff --git a/venv/lib/python3.7/site-packages/discord/errors.py b/venv/lib/python3.7/site-packages/discord/errors.py new file mode 100644 index 0000000..fd967a9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/errors.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class DiscordException(Exception): + """Base exception class for discord.py + + Ideally speaking, this could be caught to handle any exceptions thrown from this library. + """ + pass + +class ClientException(DiscordException): + """Exception that's thrown when an operation in the :class:`Client` fails. + + These are usually for exceptions that happened due to user input. 
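# --- Editorial aside (not part of the diff): a sketch of how the custom enum
# machinery above behaves; the values mirror the definitions in enums.py.
from discord.enums import ChannelType, Status, try_enum

ch = ChannelType(0)          # lookup by value -> the `text` member
vc = ChannelType['voice']    # lookup by name
print(str(Status.dnd))       # 'dnd' — Status stringifies to its value

print(try_enum(ChannelType, 5) is ChannelType.news)  # True
print(try_enum(ChannelType, 99))                     # unknown value falls through: 99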
+ """ + pass + +class NoMoreItems(DiscordException): + """Exception that is thrown when an async iteration operation has no more + items.""" + pass + +class GatewayNotFound(DiscordException): + """An exception that is usually thrown when the gateway hub + for the :class:`Client` websocket is not found.""" + def __init__(self): + message = 'The gateway to connect to discord was not found.' + super(GatewayNotFound, self).__init__(message) + +def flatten_error_dict(d, key=''): + items = [] + for k, v in d.items(): + new_key = key + '.' + k if key else k + + if isinstance(v, dict): + try: + _errors = v['_errors'] + except KeyError: + items.extend(flatten_error_dict(v, new_key).items()) + else: + items.append((new_key, ' '.join(x.get('message', '') for x in _errors))) + else: + items.append((new_key, v)) + + return dict(items) + +class HTTPException(DiscordException): + """Exception that's thrown when an HTTP request operation fails. + + Attributes + ------------ + response: :class:`aiohttp.ClientResponse` + The response of the failed HTTP request. This is an + instance of :class:`aiohttp.ClientResponse`. In some cases + this could also be a :class:`requests.Response`. + + text: :class:`str` + The text of the error. Could be an empty string. + status: :class:`int` + The status code of the HTTP request. + code: :class:`int` + The Discord specific error code for the failure. + """ + + def __init__(self, response, message): + self.response = response + self.status = response.status + if isinstance(message, dict): + self.code = message.get('code', 0) + base = message.get('message', '') + errors = message.get('errors') + if errors: + errors = flatten_error_dict(errors) + helpful = '\n'.join('In %s: %s' % t for t in errors.items()) + self.text = base + '\n' + helpful + else: + self.text = base + else: + self.text = message + self.code = 0 + + fmt = '{0.status} {0.reason} (error code: {1})' + if len(self.text): + fmt = fmt + ': {2}' + + super().__init__(fmt.format(self.response, self.code, self.text)) + +class Forbidden(HTTPException): + """Exception that's thrown for when status code 403 occurs. + + Subclass of :exc:`HTTPException` + """ + pass + +class NotFound(HTTPException): + """Exception that's thrown for when status code 404 occurs. + + Subclass of :exc:`HTTPException` + """ + pass + + +class InvalidData(ClientException): + """Exception that's raised when the library encounters unknown + or invalid data from Discord. + """ + pass + +class InvalidArgument(ClientException): + """Exception that's thrown when an argument to a function + is invalid some way (e.g. wrong value or wrong type). + + This could be considered the analogous of ``ValueError`` and + ``TypeError`` except inherited from :exc:`ClientException` and thus + :exc:`DiscordException`. + """ + pass + +class LoginFailure(ClientException): + """Exception that's thrown when the :meth:`Client.login` function + fails to log you in from improper credentials or some other misc. + failure. + """ + pass + +class ConnectionClosed(ClientException): + """Exception that's thrown when the gateway connection is + closed for reasons that could not be handled internally. + + Attributes + ----------- + code: :class:`int` + The close code of the websocket. + reason: :class:`str` + The reason provided for the closure. + shard_id: Optional[:class:`int`] + The shard ID that got closed if applicable. 
+ """ + def __init__(self, original, *, shard_id): + # This exception is just the same exception except + # reconfigured to subclass ClientException for users + self.code = original.code + self.reason = original.reason + self.shard_id = shard_id + super().__init__(str(original)) diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/__init__.py b/venv/lib/python3.7/site-packages/discord/ext/commands/__init__.py new file mode 100644 index 0000000..b14fd65 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +""" +discord.ext.commands +~~~~~~~~~~~~~~~~~~~~~ + +An extension module to facilitate creation of bot commands. + +:copyright: (c) 2019 Rapptz +:license: MIT, see LICENSE for more details. +""" + +from .bot import Bot, AutoShardedBot, when_mentioned, when_mentioned_or +from .context import Context +from .core import * +from .errors import * +from .help import * +from .converter import * +from .cooldowns import * +from .cog import * diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/_types.py b/venv/lib/python3.7/site-packages/discord/ext/commands/_types.py new file mode 100644 index 0000000..bd25447 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/_types.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +# This is merely a tag type to avoid circular import issues. +# Yes, this is a terrible solution but ultimately it is the only solution. 
+class _BaseCommand: + __slots__ = () diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/bot.py b/venv/lib/python3.7/site-packages/discord/ext/commands/bot.py new file mode 100644 index 0000000..66e245d --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/bot.py @@ -0,0 +1,969 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import collections +import inspect +import importlib +import sys +import traceback +import re +import types + +import discord + +from .core import GroupMixin, Command +from .view import StringView +from .context import Context +from . import errors +from .help import HelpCommand, DefaultHelpCommand +from .cog import Cog + +def when_mentioned(bot, msg): + """A callable that implements a command prefix equivalent to being mentioned. + + These are meant to be passed into the :attr:`.Bot.command_prefix` attribute. + """ + return [bot.user.mention + ' ', '<@!%s> ' % bot.user.id] + +def when_mentioned_or(*prefixes): + """A callable that implements when mentioned or other prefixes provided. + + These are meant to be passed into the :attr:`.Bot.command_prefix` attribute. + + Example + -------- + + .. code-block:: python3 + + bot = commands.Bot(command_prefix=commands.when_mentioned_or('!')) + + + .. note:: + + This callable returns another callable, so if this is done inside a custom + callable, you must call the returned callable, for example: + + .. 
code-block:: python3 + + async def get_prefix(bot, message): + extras = await prefixes_for(message.guild) # returns a list + return commands.when_mentioned_or(*extras)(bot, message) + + + See Also + ---------- + :func:`.when_mentioned` + """ + def inner(bot, msg): + r = list(prefixes) + r = when_mentioned(bot, msg) + r + return r + + return inner + +def _is_submodule(parent, child): + return parent == child or child.startswith(parent + ".") + +class _DefaultRepr: + def __repr__(self): + return '' + +_default = _DefaultRepr() + +class BotBase(GroupMixin): + def __init__(self, command_prefix, help_command=_default, description=None, **options): + super().__init__(**options) + self.command_prefix = command_prefix + self.extra_events = {} + self.__cogs = {} + self.__extensions = {} + self._checks = [] + self._check_once = [] + self._before_invoke = None + self._after_invoke = None + self._help_command = None + self.description = inspect.cleandoc(description) if description else '' + self.owner_id = options.get('owner_id') + + if options.pop('self_bot', False): + self._skip_check = lambda x, y: x != y + else: + self._skip_check = lambda x, y: x == y + + if help_command is _default: + self.help_command = DefaultHelpCommand() + else: + self.help_command = help_command + + # internal helpers + + def dispatch(self, event_name, *args, **kwargs): + super().dispatch(event_name, *args, **kwargs) + ev = 'on_' + event_name + for event in self.extra_events.get(ev, []): + self._schedule_event(event, ev, *args, **kwargs) + + async def close(self): + for extension in tuple(self.__extensions): + try: + self.unload_extension(extension) + except Exception: + pass + + for cog in tuple(self.__cogs): + try: + self.remove_cog(cog) + except Exception: + pass + + await super().close() + + async def on_command_error(self, context, exception): + """|coro| + + The default command error handler provided by the bot. + + By default this prints to :data:`sys.stderr` however it could be + overridden to have a different implementation. + + This only fires if you do not specify any listeners for command error. + """ + if self.extra_events.get('on_command_error', None): + return + + if hasattr(context.command, 'on_error'): + return + + cog = context.cog + if cog: + if Cog._get_overridden_method(cog.cog_command_error) is not None: + return + + print('Ignoring exception in command {}:'.format(context.command), file=sys.stderr) + traceback.print_exception(type(exception), exception, exception.__traceback__, file=sys.stderr) + + # global check registration + + def check(self, func): + r"""A decorator that adds a global check to the bot. + + A global check is similar to a :func:`.check` that is applied + on a per command basis except it is run before any command checks + have been verified and applies to every command the bot has. + + .. note:: + + This function can either be a regular function or a coroutine. + + Similar to a command :func:`.check`\, this takes a single parameter + of type :class:`.Context` and can only raise exceptions inherited from + :exc:`.CommandError`. + + Example + --------- + + .. code-block:: python3 + + @bot.check + def check_commands(ctx): + return ctx.command.qualified_name in allowed_commands + + """ + self.add_check(func) + return func + + def add_check(self, func, *, call_once=False): + """Adds a global check to the bot. + + This is the non-decorator interface to :meth:`.check` + and :meth:`.check_once`. + + Parameters + ----------- + func + The function that was used as a global check. 
+ call_once: :class:`bool` + If the function should only be called once per + :meth:`Command.invoke` call. + """ + + if call_once: + self._check_once.append(func) + else: + self._checks.append(func) + + def remove_check(self, func, *, call_once=False): + """Removes a global check from the bot. + + This function is idempotent and will not raise an exception + if the function is not in the global checks. + + Parameters + ----------- + func + The function to remove from the global checks. + call_once: :class:`bool` + If the function was added with ``call_once=True`` in + the :meth:`.Bot.add_check` call or using :meth:`.check_once`. + """ + l = self._check_once if call_once else self._checks + + try: + l.remove(func) + except ValueError: + pass + + def check_once(self, func): + r"""A decorator that adds a "call once" global check to the bot. + + Unlike regular global checks, this one is called only once + per :meth:`Command.invoke` call. + + Regular global checks are called whenever a command is called + or :meth:`.Command.can_run` is called. This type of check + bypasses that and ensures that it's called only once, even inside + the default help command. + + .. note:: + + This function can either be a regular function or a coroutine. + + Similar to a command :func:`.check`\, this takes a single parameter + of type :class:`.Context` and can only raise exceptions inherited from + :exc:`.CommandError`. + + Example + --------- + + .. code-block:: python3 + + @bot.check_once + def whitelist(ctx): + return ctx.message.author.id in my_whitelist + + """ + self.add_check(func, call_once=True) + return func + + async def can_run(self, ctx, *, call_once=False): + data = self._check_once if call_once else self._checks + + if len(data) == 0: + return True + + return await discord.utils.async_all(f(ctx) for f in data) + + async def is_owner(self, user): + """Checks if a :class:`~discord.User` or :class:`~discord.Member` is the owner of + this bot. + + If an :attr:`owner_id` is not set, it is fetched automatically + through the use of :meth:`~.Bot.application_info`. + + Parameters + ----------- + user: :class:`.abc.User` + The user to check for. + + Returns + -------- + :class:`bool` + Whether the user is the owner. + """ + + if self.owner_id is None: + app = await self.application_info() + self.owner_id = owner_id = app.owner.id + return user.id == owner_id + return user.id == self.owner_id + + def before_invoke(self, coro): + """A decorator that registers a coroutine as a pre-invoke hook. + + A pre-invoke hook is called directly before the command is + called. This makes it a useful function to set up database + connections or any type of set up required. + + This pre-invoke hook takes a sole parameter, a :class:`.Context`. + + .. note:: + + The :meth:`~.Bot.before_invoke` and :meth:`~.Bot.after_invoke` hooks are + only called if all checks and argument parsing procedures pass + without error. If any check or argument parsing procedures fail + then the hooks are not called. + + Parameters + ----------- + coro: :ref:`coroutine ` + The coroutine to register as the pre-invoke hook. + + Raises + ------- + TypeError + The coroutine passed is not actually a coroutine. + """ + if not asyncio.iscoroutinefunction(coro): + raise TypeError('The pre-invoke hook must be a coroutine.') + + self._before_invoke = coro + return coro + + def after_invoke(self, coro): + r"""A decorator that registers a coroutine as a post-invoke hook. + + A post-invoke hook is called directly after the command is + called. 
This makes it a useful function to clean-up database + connections or any type of clean up required. + + This post-invoke hook takes a sole parameter, a :class:`.Context`. + + .. note:: + + Similar to :meth:`~.Bot.before_invoke`\, this is not called unless + checks and argument parsing procedures succeed. This hook is, + however, **always** called regardless of the internal command + callback raising an error (i.e. :exc:`.CommandInvokeError`\). + This makes it ideal for clean-up scenarios. + + Parameters + ----------- + coro: :ref:`coroutine ` + The coroutine to register as the post-invoke hook. + + Raises + ------- + TypeError + The coroutine passed is not actually a coroutine. + """ + if not asyncio.iscoroutinefunction(coro): + raise TypeError('The post-invoke hook must be a coroutine.') + + self._after_invoke = coro + return coro + + # listener registration + + def add_listener(self, func, name=None): + """The non decorator alternative to :meth:`.listen`. + + Parameters + ----------- + func: :ref:`coroutine ` + The function to call. + name: Optional[:class:`str`] + The name of the event to listen for. Defaults to ``func.__name__``. + + Example + -------- + + .. code-block:: python3 + + async def on_ready(): pass + async def my_message(message): pass + + bot.add_listener(on_ready) + bot.add_listener(my_message, 'on_message') + + """ + name = func.__name__ if name is None else name + + if not asyncio.iscoroutinefunction(func): + raise TypeError('Listeners must be coroutines') + + if name in self.extra_events: + self.extra_events[name].append(func) + else: + self.extra_events[name] = [func] + + def remove_listener(self, func, name=None): + """Removes a listener from the pool of listeners. + + Parameters + ----------- + func + The function that was used as a listener to remove. + name: :class:`str` + The name of the event we want to remove. Defaults to + ``func.__name__``. + """ + + name = func.__name__ if name is None else name + + if name in self.extra_events: + try: + self.extra_events[name].remove(func) + except ValueError: + pass + + def listen(self, name=None): + """A decorator that registers another function as an external + event listener. Basically this allows you to listen to multiple + events from different places e.g. such as :func:`.on_ready` + + The functions being listened to must be a :ref:`coroutine `. + + Example + -------- + + .. code-block:: python3 + + @bot.listen() + async def on_message(message): + print('one') + + # in some other file... + + @bot.listen('on_message') + async def my_message(message): + print('two') + + Would print one and two in an unspecified order. + + Raises + ------- + TypeError + The function being listened to is not a coroutine. + """ + + def decorator(func): + self.add_listener(func, name) + return func + + return decorator + + # cogs + + def add_cog(self, cog): + """Adds a "cog" to the bot. + + A cog is a class that has its own event listeners and commands. + + Parameters + ----------- + cog: :class:`.Cog` + The cog to register to the bot. + + Raises + ------- + TypeError + The cog does not inherit from :class:`.Cog`. + CommandError + An error happened during loading. + """ + + if not isinstance(cog, Cog): + raise TypeError('cogs must derive from Cog') + + cog = cog._inject(self) + self.__cogs[cog.__cog_name__] = cog + + def get_cog(self, name): + """Gets the cog instance requested. + + If the cog is not found, ``None`` is returned instead. + + Parameters + ----------- + name: :class:`str` + The name of the cog you are requesting. 
+ This is equivalent to the name passed via keyword + argument in class creation or the class name if unspecified. + """ + return self.__cogs.get(name) + + def remove_cog(self, name): + """Removes a cog from the bot. + + All registered commands and event listeners that the + cog has registered will be removed as well. + + If no cog is found then this method has no effect. + + Parameters + ----------- + name: :class:`str` + The name of the cog to remove. + """ + + cog = self.__cogs.pop(name, None) + if cog is None: + return + + help_command = self._help_command + if help_command and help_command.cog is cog: + help_command.cog = None + cog._eject(self) + + @property + def cogs(self): + """Mapping[:class:`str`, :class:`Cog`]: A read-only mapping of cog name to cog.""" + return types.MappingProxyType(self.__cogs) + + # extensions + + def _remove_module_references(self, name): + # find all references to the module + # remove the cogs registered from the module + for cogname, cog in self.__cogs.copy().items(): + if _is_submodule(name, cog.__module__): + self.remove_cog(cogname) + + # remove all the commands from the module + for cmd in self.all_commands.copy().values(): + if cmd.module is not None and _is_submodule(name, cmd.module): + if isinstance(cmd, GroupMixin): + cmd.recursively_remove_all_commands() + self.remove_command(cmd.name) + + # remove all the listeners from the module + for event_list in self.extra_events.copy().values(): + remove = [] + for index, event in enumerate(event_list): + if event.__module__ is not None and _is_submodule(name, event.__module__): + remove.append(index) + + for index in reversed(remove): + del event_list[index] + + def _call_module_finalizers(self, lib, key): + try: + func = getattr(lib, 'teardown') + except AttributeError: + pass + else: + try: + func(self) + except Exception: + pass + finally: + self.__extensions.pop(key, None) + sys.modules.pop(key, None) + name = lib.__name__ + for module in list(sys.modules.keys()): + if _is_submodule(name, module): + del sys.modules[module] + + def _load_from_module_spec(self, lib, key): + # precondition: key not in self.__extensions + try: + setup = getattr(lib, 'setup') + except AttributeError: + del sys.modules[key] + raise errors.NoEntryPointError(key) + + try: + setup(self) + except Exception as e: + self._remove_module_references(lib.__name__) + self._call_module_finalizers(lib, key) + raise errors.ExtensionFailed(key, e) from e + else: + self.__extensions[key] = lib + + def load_extension(self, name): + """Loads an extension. + + An extension is a python module that contains commands, cogs, or + listeners. + + An extension must have a global function, ``setup`` defined as + the entry point on what to do when the extension is loaded. This entry + point must have a single argument, the ``bot``. + + Parameters + ------------ + name: :class:`str` + The extension name to load. It must be dot separated like + regular Python imports if accessing a sub-module. e.g. + ``foo.test`` if you want to import ``foo/test.py``. + + Raises + -------- + ExtensionNotFound + The extension could not be imported. + ExtensionAlreadyLoaded + The extension is already loaded. + NoEntryPointError + The extension does not have a setup function. + ExtensionFailed + The extension setup function had an execution error. 
+ """ + + if name in self.__extensions: + raise errors.ExtensionAlreadyLoaded(name) + + try: + lib = importlib.import_module(name) + except ImportError as e: + raise errors.ExtensionNotFound(name, e) from e + else: + self._load_from_module_spec(lib, name) + + def unload_extension(self, name): + """Unloads an extension. + + When the extension is unloaded, all commands, listeners, and cogs are + removed from the bot and the module is un-imported. + + The extension can provide an optional global function, ``teardown``, + to do miscellaneous clean-up if necessary. This function takes a single + parameter, the ``bot``, similar to ``setup`` from + :meth:`~.Bot.load_extension`. + + Parameters + ------------ + name: :class:`str` + The extension name to unload. It must be dot separated like + regular Python imports if accessing a sub-module. e.g. + ``foo.test`` if you want to import ``foo/test.py``. + + Raises + ------- + ExtensionNotLoaded + The extension was not loaded. + """ + + lib = self.__extensions.get(name) + if lib is None: + raise errors.ExtensionNotLoaded(name) + + self._remove_module_references(lib.__name__) + self._call_module_finalizers(lib, name) + + def reload_extension(self, name): + """Atomically reloads an extension. + + This replaces the extension with the same extension, only refreshed. This is + equivalent to a :meth:`unload_extension` followed by a :meth:`load_extension` + except done in an atomic way. That is, if an operation fails mid-reload then + the bot will roll-back to the prior working state. + + Parameters + ------------ + name: :class:`str` + The extension name to reload. It must be dot separated like + regular Python imports if accessing a sub-module. e.g. + ``foo.test`` if you want to import ``foo/test.py``. + + Raises + ------- + ExtensionNotLoaded + The extension was not loaded. + ExtensionNotFound + The extension could not be imported. + NoEntryPointError + The extension does not have a setup function. + ExtensionFailed + The extension setup function had an execution error. + """ + + lib = self.__extensions.get(name) + if lib is None: + raise errors.ExtensionNotLoaded(name) + + # get the previous module states from sys modules + modules = { + name: module + for name, module in sys.modules.items() + if _is_submodule(lib.__name__, name) + } + + try: + # Unload and then load the module... + self._remove_module_references(lib.__name__) + self._call_module_finalizers(lib, name) + self.load_extension(name) + except Exception as e: + # if the load failed, the remnants should have been + # cleaned from the load_extension function call + # so let's load it from our old compiled library. 
+ self._load_from_module_spec(lib, name) + + # revert sys.modules back to normal and raise back to caller + sys.modules.update(modules) + raise + + @property + def extensions(self): + """Mapping[:class:`str`, :class:`py:types.ModuleType`]: A read-only mapping of extension name to extension.""" + return types.MappingProxyType(self.__extensions) + + # help command stuff + + @property + def help_command(self): + return self._help_command + + @help_command.setter + def help_command(self, value): + if value is not None: + if not isinstance(value, HelpCommand): + raise TypeError('help_command must be a subclass of HelpCommand') + if self._help_command is not None: + self._help_command._remove_from_bot(self) + self._help_command = value + value._add_to_bot(self) + elif self._help_command is not None: + self._help_command._remove_from_bot(self) + self._help_command = None + else: + self._help_command = None + + # command processing + + async def get_prefix(self, message): + """|coro| + + Retrieves the prefix the bot is listening to + with the message as a context. + + Parameters + ----------- + message: :class:`discord.Message` + The message context to get the prefix of. + + Returns + -------- + Union[List[:class:`str`], :class:`str`] + A list of prefixes or a single prefix that the bot is + listening for. + """ + prefix = ret = self.command_prefix + if callable(prefix): + ret = await discord.utils.maybe_coroutine(prefix, self, message) + + if not isinstance(ret, str): + try: + ret = list(ret) + except TypeError: + # It's possible that a generator raised this exception. Don't + # replace it with our own error if that's the case. + if isinstance(ret, collections.Iterable): + raise + + raise TypeError("command_prefix must be plain string, iterable of strings, or callable " + "returning either of these, not {}".format(ret.__class__.__name__)) + + if not ret: + raise ValueError("Iterable command_prefix must contain at least one prefix") + + return ret + + async def get_context(self, message, *, cls=Context): + r"""|coro| + + Returns the invocation context from the message. + + This is a more low-level counter-part for :meth:`.process_commands` + to allow users more fine grained control over the processing. + + The returned context is not guaranteed to be a valid invocation + context, :attr:`.Context.valid` must be checked to make sure it is. + If the context is not valid then it is not a valid candidate to be + invoked under :meth:`~.Bot.invoke`. + + Parameters + ----------- + message: :class:`discord.Message` + The message to get the invocation context from. + cls + The factory class that will be used to create the context. + By default, this is :class:`.Context`. Should a custom + class be provided, it must be similar enough to :class:`.Context`\'s + interface. + + Returns + -------- + :class:`.Context` + The invocation context. The type of this can change via the + ``cls`` parameter. + """ + + view = StringView(message.content) + ctx = cls(prefix=None, view=view, bot=self, message=message) + + if self._skip_check(message.author.id, self.user.id): + return ctx + + prefix = await self.get_prefix(message) + invoked_prefix = prefix + + if isinstance(prefix, str): + if not view.skip_string(prefix): + return ctx + else: + try: + # if the context class' __init__ consumes something from the view this + # will be wrong. That seems unreasonable though. 
+ if message.content.startswith(tuple(prefix)): + invoked_prefix = discord.utils.find(view.skip_string, prefix) + else: + return ctx + + except TypeError: + if not isinstance(prefix, list): + raise TypeError("get_prefix must return either a string or a list of string, " + "not {}".format(prefix.__class__.__name__)) + + # It's possible a bad command_prefix got us here. + for value in prefix: + if not isinstance(value, str): + raise TypeError("Iterable command_prefix or list returned from get_prefix must " + "contain only strings, not {}".format(value.__class__.__name__)) + + # Getting here shouldn't happen + raise + + invoker = view.get_word() + ctx.invoked_with = invoker + ctx.prefix = invoked_prefix + ctx.command = self.all_commands.get(invoker) + return ctx + + async def invoke(self, ctx): + """|coro| + + Invokes the command given under the invocation context and + handles all the internal event dispatch mechanisms. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context to invoke. + """ + if ctx.command is not None: + self.dispatch('command', ctx) + try: + if await self.can_run(ctx, call_once=True): + await ctx.command.invoke(ctx) + except errors.CommandError as exc: + await ctx.command.dispatch_error(ctx, exc) + else: + self.dispatch('command_completion', ctx) + elif ctx.invoked_with: + exc = errors.CommandNotFound('Command "{}" is not found'.format(ctx.invoked_with)) + self.dispatch('command_error', ctx, exc) + + async def process_commands(self, message): + """|coro| + + This function processes the commands that have been registered + to the bot and other groups. Without this coroutine, none of the + commands will be triggered. + + By default, this coroutine is called inside the :func:`.on_message` + event. If you choose to override the :func:`.on_message` event, then + you should invoke this coroutine as well. + + This is built using other low level tools, and is equivalent to a + call to :meth:`~.Bot.get_context` followed by a call to :meth:`~.Bot.invoke`. + + This also checks if the message's author is a bot and doesn't + call :meth:`~.Bot.get_context` or :meth:`~.Bot.invoke` if so. + + Parameters + ----------- + message: :class:`discord.Message` + The message to process commands for. + """ + if message.author.bot: + return + + ctx = await self.get_context(message) + await self.invoke(ctx) + + async def on_message(self, message): + await self.process_commands(message) + +class Bot(BotBase, discord.Client): + """Represents a discord bot. + + This class is a subclass of :class:`discord.Client` and as a result + anything that you can do with a :class:`discord.Client` you can do with + this bot. + + This class also subclasses :class:`.GroupMixin` to provide the functionality + to manage commands. + + Attributes + ----------- + command_prefix + The command prefix is what the message content must contain initially + to have a command invoked. This prefix could either be a string to + indicate what the prefix should be, or a callable that takes in the bot + as its first parameter and :class:`discord.Message` as its second + parameter and returns the prefix. This is to facilitate "dynamic" + command prefixes. This callable can be either a regular function or + a coroutine. + + An empty string as the prefix always matches, enabling prefix-less + command invocation. While this may be useful in DMs it should be avoided + in servers, as it's likely to cause performance issues and unintended + command invocations. 
+ + The command prefix could also be an iterable of strings indicating that + multiple checks for the prefix should be used and the first one to + match will be the invocation prefix. You can get this prefix via + :attr:`.Context.prefix`. To avoid confusion empty iterables are not + allowed. + + .. note:: + + When passing multiple prefixes be careful to not pass a prefix + that matches a longer prefix occurring later in the sequence. For + example, if the command prefix is ``('!', '!?')`` the ``'!?'`` + prefix will never be matched to any message as the previous one + matches messages starting with ``!?``. This is especially important + when passing an empty string, it should always be last as no prefix + after it will be matched. + case_insensitive: :class:`bool` + Whether the commands should be case insensitive. Defaults to ``False``. This + attribute does not carry over to groups. You must set it to every group if + you require group commands to be case insensitive as well. + description: :class:`str` + The content prefixed into the default help message. + self_bot: :class:`bool` + If ``True``, the bot will only listen to commands invoked by itself rather + than ignoring itself. If ``False`` (the default) then the bot will ignore + itself. This cannot be changed once initialised. + help_command: Optional[:class:`.HelpCommand`] + The help command implementation to use. This can be dynamically + set at runtime. To remove the help command pass ``None``. For more + information on implementing a help command, see :ref:`ext_commands_help_command`. + owner_id: Optional[:class:`int`] + The ID that owns the bot. If this is not set and is then queried via + :meth:`.is_owner` then it is fetched automatically using + :meth:`~.Bot.application_info`. + """ + pass + +class AutoShardedBot(BotBase, discord.AutoShardedClient): + """This is similar to :class:`.Bot` except that it is inherited from + :class:`discord.AutoShardedClient` instead. + """ + pass diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/cog.py b/venv/lib/python3.7/site-packages/discord/ext/commands/cog.py new file mode 100644 index 0000000..9f3c745 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/cog.py @@ -0,0 +1,414 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +import inspect +import copy +from ._types import _BaseCommand + +__all__ = ( + 'CogMeta', + 'Cog', +) + +class CogMeta(type): + """A metaclass for defining a cog. + + Note that you should probably not use this directly. It is exposed + purely for documentation purposes along with making custom metaclasses to intermix + with other metaclasses such as the :class:`abc.ABCMeta` metaclass. + + For example, to create an abstract cog mixin class, the following would be done. + + .. code-block:: python3 + + import abc + + class CogABCMeta(commands.CogMeta, abc.ABCMeta): + pass + + class SomeMixin(metaclass=abc.ABCMeta): + pass + + class SomeCogMixin(SomeMixin, commands.Cog, metaclass=CogABCMeta): + pass + + .. note:: + + When passing an attribute of a metaclass that is documented below, note + that you must pass it as a keyword-only argument to the class creation + like the following example: + + .. code-block:: python3 + + class MyCog(commands.Cog, name='My Cog'): + pass + + Attributes + ----------- + name: :class:`str` + The cog name. By default, it is the name of the class with no modification. + command_attrs: :class:`dict` + A list of attributes to apply to every command inside this cog. The dictionary + is passed into the :class:`Command` (or its subclass) options at ``__init__``. + If you specify attributes inside the command attribute in the class, it will + override the one specified inside this attribute. For example: + + .. code-block:: python3 + + class MyCog(commands.Cog, command_attrs=dict(hidden=True)): + @commands.command() + async def foo(self, ctx): + pass # hidden -> True + + @commands.command(hidden=False) + async def bar(self, ctx): + pass # hidden -> False + """ + + def __new__(cls, *args, **kwargs): + name, bases, attrs = args + attrs['__cog_name__'] = kwargs.pop('name', name) + attrs['__cog_settings__'] = command_attrs = kwargs.pop('command_attrs', {}) + + commands = {} + listeners = {} + no_bot_cog = 'Commands or listeners must not start with cog_ or bot_ (in method {0.__name__}.{1})' + + new_cls = super().__new__(cls, name, bases, attrs, **kwargs) + for base in reversed(new_cls.__mro__): + for elem, value in base.__dict__.items(): + if elem in commands: + del commands[elem] + if elem in listeners: + del listeners[elem] + + is_static_method = isinstance(value, staticmethod) + if is_static_method: + value = value.__func__ + if isinstance(value, _BaseCommand): + if is_static_method: + raise TypeError('Command in method {0}.{1!r} must not be staticmethod.'.format(base, elem)) + if elem.startswith(('cog_', 'bot_')): + raise TypeError(no_bot_cog.format(base, elem)) + commands[elem] = value + elif inspect.iscoroutinefunction(value): + try: + is_listener = getattr(value, '__cog_listener__') + except AttributeError: + continue + else: + if elem.startswith(('cog_', 'bot_')): + raise TypeError(no_bot_cog.format(base, elem)) + listeners[elem] = value + + new_cls.__cog_commands__ = list(commands.values()) # this will be copied in Cog.__new__ + + listeners_as_list = [] + for listener in listeners.values(): + for listener_name in listener.__cog_listener_names__: + # I use __name__ instead of just storing the value so I can inject + # the self attribute when the time comes to add them to the bot + listeners_as_list.append((listener_name, listener.__name__)) + + new_cls.__cog_listeners__ = listeners_as_list + return new_cls + + def __init__(self, *args, **kwargs): + super().__init__(*args) + + @classmethod + def qualified_name(cls): + return cls.__cog_name__ + +def 
_cog_special_method(func): + func.__cog_special_method__ = None + return func + +class Cog(metaclass=CogMeta): + """The base class that all cogs must inherit from. + + A cog is a collection of commands, listeners, and optional state to + help group commands together. More information on them can be found on + the :ref:`ext_commands_cogs` page. + + When inheriting from this class, the options shown in :class:`CogMeta` + are equally valid here. + """ + + def __new__(cls, *args, **kwargs): + # For issue 426, we need to store a copy of the command objects + # since we modify them to inject `self` to them. + # To do this, we need to interfere with the Cog creation process. + self = super().__new__(cls) + cmd_attrs = cls.__cog_settings__ + + # Either update the command with the cog provided defaults or copy it. + self.__cog_commands__ = tuple(c._update_copy(cmd_attrs) for c in cls.__cog_commands__) + + lookup = { + cmd.qualified_name: cmd + for cmd in self.__cog_commands__ + } + + # Update the Command instances dynamically as well + for command in self.__cog_commands__: + setattr(self, command.callback.__name__, command) + parent = command.parent + if parent is not None: + # Get the latest parent reference + parent = lookup[parent.qualified_name] + + # Update our parent's reference to our self + removed = parent.remove_command(command.name) + parent.add_command(command) + + return self + + def get_commands(self): + r"""Returns a :class:`list` of :class:`.Command`\s that are + defined inside this cog. + + .. note:: + + This does not include subcommands. + """ + return [c for c in self.__cog_commands__ if c.parent is None] + + @property + def qualified_name(self): + """:class:`str`: Returns the cog's specified name, not the class name.""" + return self.__cog_name__ + + @property + def description(self): + """:class:`str`: Returns the cog's description, typically the cleaned docstring.""" + try: + return self.__cog_cleaned_doc__ + except AttributeError: + self.__cog_cleaned_doc__ = cleaned = inspect.getdoc(self) + return cleaned + + def walk_commands(self): + """An iterator that recursively walks through this cog's commands and subcommands.""" + from .core import GroupMixin + for command in self.__cog_commands__: + if command.parent is None: + yield command + if isinstance(command, GroupMixin): + yield from command.walk_commands() + + def get_listeners(self): + """Returns a :class:`list` of (name, function) listener pairs that are defined in this cog.""" + return [(name, getattr(self, method_name)) for name, method_name in self.__cog_listeners__] + + @classmethod + def _get_overridden_method(cls, method): + """Return None if the method is not overridden. Otherwise returns the overridden method.""" + return getattr(method.__func__, '__cog_special_method__', method) + + @classmethod + def listener(cls, name=None): + """A decorator that marks a function as a listener. + + This is the cog equivalent of :meth:`.Bot.listen`. + + Parameters + ------------ + name: :class:`str` + The name of the event being listened to. If not provided, it + defaults to the function's name. + + Raises + -------- + TypeError + The function is not a coroutine function or a string was not passed as + the name. 
+ """ + + if name is not None and not isinstance(name, str): + raise TypeError('Cog.listener expected str but received {0.__class__.__name__!r} instead.'.format(name)) + + def decorator(func): + actual = func + if isinstance(actual, staticmethod): + actual = actual.__func__ + if not inspect.iscoroutinefunction(actual): + raise TypeError('Listener function must be a coroutine function.') + actual.__cog_listener__ = True + to_assign = name or actual.__name__ + try: + actual.__cog_listener_names__.append(to_assign) + except AttributeError: + actual.__cog_listener_names__ = [to_assign] + # we have to return `func` instead of `actual` because + # we need the type to be `staticmethod` for the metaclass + # to pick it up but the metaclass unfurls the function and + # thus the assignments need to be on the actual function + return func + return decorator + + @_cog_special_method + def cog_unload(self): + """A special method that is called when the cog gets removed. + + This function **cannot** be a coroutine. It must be a regular + function. + + Subclasses must replace this if they want special unloading behaviour. + """ + pass + + @_cog_special_method + def bot_check_once(self, ctx): + """A special method that registers as a :meth:`.Bot.check_once` + check. + + This function **can** be a coroutine and must take a sole parameter, + ``ctx``, to represent the :class:`.Context`. + """ + return True + + @_cog_special_method + def bot_check(self, ctx): + """A special method that registers as a :meth:`.Bot.check` + check. + + This function **can** be a coroutine and must take a sole parameter, + ``ctx``, to represent the :class:`.Context`. + """ + return True + + @_cog_special_method + def cog_check(self, ctx): + """A special method that registers as a :func:`commands.check` + for every command and subcommand in this cog. + + This function **can** be a coroutine and must take a sole parameter, + ``ctx``, to represent the :class:`.Context`. + """ + return True + + @_cog_special_method + def cog_command_error(self, ctx, error): + """A special method that is called whenever an error + is dispatched inside this cog. + + This is similar to :func:`.on_command_error` except only applying + to the commands inside this cog. + + This function **can** be a coroutine. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context where the error happened. + error: :class:`CommandError` + The error that happened. + """ + pass + + @_cog_special_method + async def cog_before_invoke(self, ctx): + """A special method that acts as a cog local pre-invoke hook. + + This is similar to :meth:`.Command.before_invoke`. + + This **must** be a coroutine. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context. + """ + pass + + @_cog_special_method + async def cog_after_invoke(self, ctx): + """A special method that acts as a cog local post-invoke hook. + + This is similar to :meth:`.Command.after_invoke`. + + This **must** be a coroutine. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context. + """ + pass + + def _inject(self, bot): + cls = self.__class__ + + # realistically, the only thing that can cause loading errors + # is essentially just the command loading, which raises if there are + # duplicates. When this condition is met, we want to undo all what + # we've added so far for some form of atomic loading. 
+ for index, command in enumerate(self.__cog_commands__): + command.cog = self + if command.parent is None: + try: + bot.add_command(command) + except Exception as e: + # undo our additions + for to_undo in self.__cog_commands__[:index]: + bot.remove_command(to_undo) + raise e + + # check if we're overriding the default + if cls.bot_check is not Cog.bot_check: + bot.add_check(self.bot_check) + + if cls.bot_check_once is not Cog.bot_check_once: + bot.add_check(self.bot_check_once, call_once=True) + + # while Bot.add_listener can raise if it's not a coroutine, + # this precondition is already met by the listener decorator + # already, thus this should never raise. + # Outside of, memory errors and the like... + for name, method_name in self.__cog_listeners__: + bot.add_listener(getattr(self, method_name), name) + + return self + + def _eject(self, bot): + cls = self.__class__ + + try: + for command in self.__cog_commands__: + if command.parent is None: + bot.remove_command(command.name) + + for _, method_name in self.__cog_listeners__: + bot.remove_listener(getattr(self, method_name)) + + if cls.bot_check is not Cog.bot_check: + bot.remove_check(self.bot_check) + + if cls.bot_check_once is not Cog.bot_check_once: + bot.remove_check(self.bot_check_once, call_once=True) + finally: + self.cog_unload() diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/context.py b/venv/lib/python3.7/site-packages/discord/ext/commands/context.py new file mode 100644 index 0000000..fb9dcfc --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/context.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import discord.abc +import discord.utils + +class Context(discord.abc.Messageable): + r"""Represents the context in which a command is being invoked under. + + This class contains a lot of meta data to help you understand more about + the invocation context. This class is not created manually and is instead + passed around to commands as the first parameter. + + This class implements the :class:`~discord.abc.Messageable` ABC. + + Attributes + ----------- + message: :class:`.Message` + The message that triggered the command being executed. + bot: :class:`.Bot` + The bot that contains the command being executed. + args: :class:`list` + The list of transformed arguments that were passed into the command. 
+ If this is accessed during the :func:`on_command_error` event + then this list could be incomplete. + kwargs: :class:`dict` + A dictionary of transformed arguments that were passed into the command. + Similar to :attr:`args`\, if this is accessed in the + :func:`on_command_error` event then this dict could be incomplete. + prefix: :class:`str` + The prefix that was used to invoke the command. + command + The command (i.e. :class:`.Command` or its subclasses) that is being + invoked currently. + invoked_with: :class:`str` + The command name that triggered this invocation. Useful for finding out + which alias called the command. + invoked_subcommand + The subcommand (i.e. :class:`.Command` or its subclasses) that was + invoked. If no valid subcommand was invoked then this is equal to + ``None``. + subcommand_passed: Optional[:class:`str`] + The string that was attempted to call a subcommand. This does not have + to point to a valid registered subcommand and could just point to a + nonsense string. If nothing was passed to attempt a call to a + subcommand then this is set to ``None``. + command_failed: :class:`bool` + A boolean that indicates if the command failed to be parsed, checked, + or invoked. + """ + + def __init__(self, **attrs): + self.message = attrs.pop('message', None) + self.bot = attrs.pop('bot', None) + self.args = attrs.pop('args', []) + self.kwargs = attrs.pop('kwargs', {}) + self.prefix = attrs.pop('prefix') + self.command = attrs.pop('command', None) + self.view = attrs.pop('view', None) + self.invoked_with = attrs.pop('invoked_with', None) + self.invoked_subcommand = attrs.pop('invoked_subcommand', None) + self.subcommand_passed = attrs.pop('subcommand_passed', None) + self.command_failed = attrs.pop('command_failed', False) + self._state = self.message._state + + async def invoke(self, *args, **kwargs): + r"""|coro| + + Calls a command with the arguments given. + + This is useful if you want to just call the callback that a + :class:`.Command` holds internally. + + .. note:: + + This does not handle converters, checks, cooldowns, pre-invoke, + or after-invoke hooks in any matter. It calls the internal callback + directly as-if it was a regular function. + + You must take care in passing the proper arguments when + using this function. + + .. warning:: + + The first parameter passed **must** be the command being invoked. + + Parameters + ----------- + command: :class:`.Command` + A command or subclass of a command that is going to be called. + \*args + The arguments to to use. + \*\*kwargs + The keyword arguments to use. + """ + + try: + command = args[0] + except IndexError: + raise TypeError('Missing command to invoke.') from None + + arguments = [] + if command.cog is not None: + arguments.append(command.cog) + + arguments.append(self) + arguments.extend(args[1:]) + + ret = await command.callback(*arguments, **kwargs) + return ret + + async def reinvoke(self, *, call_hooks=False, restart=True): + """|coro| + + Calls the command again. + + This is similar to :meth:`~.Context.invoke` except that it bypasses + checks, cooldowns, and error handlers. + + .. note:: + + If you want to bypass :exc:`.UserInputError` derived exceptions, + it is recommended to use the regular :meth:`~.Context.invoke` + as it will work more naturally. After all, this will end up + using the old arguments the user has used and will thus just + fail again. + + Parameters + ------------ + call_hooks: :class:`bool` + Whether to call the before and after invoke hooks. 
+ restart: :class:`bool` + Whether to start the call chain from the very beginning + or where we left off (i.e. the command that caused the error). + The default is to start where we left off. + """ + cmd = self.command + view = self.view + if cmd is None: + raise ValueError('This context is not valid.') + + # some state to revert to when we're done + index, previous = view.index, view.previous + invoked_with = self.invoked_with + invoked_subcommand = self.invoked_subcommand + subcommand_passed = self.subcommand_passed + + if restart: + to_call = cmd.root_parent or cmd + view.index = len(self.prefix) + view.previous = 0 + view.get_word() # advance to get the root command + else: + to_call = cmd + + try: + await to_call.reinvoke(self, call_hooks=call_hooks) + finally: + self.command = cmd + view.index = index + view.previous = previous + self.invoked_with = invoked_with + self.invoked_subcommand = invoked_subcommand + self.subcommand_passed = subcommand_passed + + @property + def valid(self): + """Checks if the invocation context is valid to be invoked with.""" + return self.prefix is not None and self.command is not None + + async def _get_channel(self): + return self.channel + + @property + def cog(self): + """Returns the cog associated with this context's command. None if it does not exist.""" + + if self.command is None: + return None + return self.command.cog + + @discord.utils.cached_property + def guild(self): + """Returns the guild associated with this context's command. None if not available.""" + return self.message.guild + + @discord.utils.cached_property + def channel(self): + """Returns the channel associated with this context's command. Shorthand for :attr:`.Message.channel`.""" + return self.message.channel + + @discord.utils.cached_property + def author(self): + """Returns the author associated with this context's command. Shorthand for :attr:`.Message.author`""" + return self.message.author + + @discord.utils.cached_property + def me(self): + """Similar to :attr:`.Guild.me` except it may return the :class:`.ClientUser` in private message contexts.""" + return self.guild.me if self.guild is not None else self.bot.user + + @property + def voice_client(self): + r"""Optional[:class:`.VoiceClient`]: A shortcut to :attr:`.Guild.voice_client`\, if applicable.""" + g = self.guild + return g.voice_client if g else None + + async def send_help(self, *args): + """send_help(entity=) + + |coro| + + Shows the help command for the specified entity if given. + The entity can be a command or a cog. + + If no entity is given, then it'll show help for the + entire bot. + + If the entity is a string, then it looks up whether it's a + :class:`Cog` or a :class:`Command`. + + .. note:: + + Due to the way this function works, instead of returning + something similar to :meth:`~.commands.HelpCommand.command_not_found` + this returns :class:`None` on bad input or no help command. + + Parameters + ------------ + entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]] + The entity to show help for. + + Returns + -------- + Any + The result of the help command, if any. 
+ """ + from .core import Group, Command + + bot = self.bot + cmd = bot.help_command + + if cmd is None: + return None + + cmd = cmd.copy() + cmd.context = self + if len(args) == 0: + await cmd.prepare_help_command(self, None) + mapping = cmd.get_bot_mapping() + return await cmd.send_bot_help(mapping) + + entity = args[0] + if entity is None: + return None + + if isinstance(entity, str): + entity = bot.get_cog(entity) or bot.get_command(entity) + + try: + qualified_name = entity.qualified_name + except AttributeError: + # if we're here then it's not a cog, group, or command. + return None + + await cmd.prepare_help_command(self, entity.qualified_name) + + if hasattr(entity, '__cog_commands__'): + return await cmd.send_cog_help(entity) + elif isinstance(entity, Group): + return await cmd.send_group_help(entity) + elif isinstance(entity, Command): + return await cmd.send_command_help(entity) + else: + return None diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/converter.py b/venv/lib/python3.7/site-packages/discord/ext/commands/converter.py new file mode 100644 index 0000000..86a3db4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/converter.py @@ -0,0 +1,553 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import re +import inspect + +import discord + +from .errors import BadArgument, NoPrivateMessage + +__all__ = ( + 'Converter', + 'MemberConverter', + 'UserConverter', + 'MessageConverter', + 'TextChannelConverter', + 'InviteConverter', + 'RoleConverter', + 'GameConverter', + 'ColourConverter', + 'VoiceChannelConverter', + 'EmojiConverter', + 'PartialEmojiConverter', + 'CategoryChannelConverter', + 'IDConverter', + 'clean_content', + 'Greedy', +) + +def _get_from_guilds(bot, getter, argument): + result = None + for guild in bot.guilds: + result = getattr(guild, getter)(argument) + if result: + return result + return result + +class Converter: + """The base class of custom converters that require the :class:`.Context` + to be passed to be useful. + + This allows you to implement converters that function similar to the + special cased ``discord`` classes. + + Classes that derive from this should override the :meth:`~.Converter.convert` + method to do its conversion logic. This method must be a :ref:`coroutine `. + """ + + async def convert(self, ctx, argument): + """|coro| + + The method to override to do conversion logic. 
+ + If an error is found while converting, it is recommended to + raise a :exc:`.CommandError` derived exception as it will + properly propagate to the error handlers. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context that the argument is being used in. + argument: :class:`str` + The argument that is being converted. + """ + raise NotImplementedError('Derived classes need to implement this.') + +class IDConverter(Converter): + def __init__(self): + self._id_regex = re.compile(r'([0-9]{15,21})$') + super().__init__() + + def _get_id_match(self, argument): + return self._id_regex.match(argument) + +class MemberConverter(IDConverter): + """Converts to a :class:`~discord.Member`. + + All lookups are via the local guild. If in a DM context, then the lookup + is done by the global cache. + + The lookup strategy is as follows (in order): + + 1. Lookup by ID. + 2. Lookup by mention. + 3. Lookup by name#discrim + 4. Lookup by name + 5. Lookup by nickname + """ + + async def convert(self, ctx, argument): + bot = ctx.bot + match = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument) + guild = ctx.guild + result = None + if match is None: + # not a mention... + if guild: + result = guild.get_member_named(argument) + else: + result = _get_from_guilds(bot, 'get_member_named', argument) + else: + user_id = int(match.group(1)) + if guild: + result = guild.get_member(user_id) + else: + result = _get_from_guilds(bot, 'get_member', user_id) + + if result is None: + raise BadArgument('Member "{}" not found'.format(argument)) + + return result + +class UserConverter(IDConverter): + """Converts to a :class:`~discord.User`. + + All lookups are via the global user cache. + + The lookup strategy is as follows (in order): + + 1. Lookup by ID. + 2. Lookup by mention. + 3. Lookup by name#discrim + 4. Lookup by name + """ + async def convert(self, ctx, argument): + match = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument) + result = None + state = ctx._state + + if match is not None: + user_id = int(match.group(1)) + result = ctx.bot.get_user(user_id) + else: + arg = argument + # check for discriminator if it exists + if len(arg) > 5 and arg[-5] == '#': + discrim = arg[-4:] + name = arg[:-5] + predicate = lambda u: u.name == name and u.discriminator == discrim + result = discord.utils.find(predicate, state._users.values()) + if result is not None: + return result + + predicate = lambda u: u.name == arg + result = discord.utils.find(predicate, state._users.values()) + + if result is None: + raise BadArgument('User "{}" not found'.format(argument)) + + return result + +class MessageConverter(Converter): + """Converts to a :class:`discord.Message`. + + .. versionadded:: 1.1.0 + + The lookup strategy is as follows (in order): + + 1. Lookup by "{channel ID}-{message ID}" (retrieved by shift-clicking on "Copy ID") + 2. Lookup by message ID (the message **must** be in the context channel) + 3. 
Lookup by message URL
+    """

+    async def convert(self, ctx, argument):
+        id_regex = re.compile(r'^(?:(?P<channel_id>[0-9]{15,21})-)?(?P<message_id>[0-9]{15,21})$')
+        link_regex = re.compile(
+            r'^https?://(?:(ptb|canary)\.)?discordapp\.com/channels/'
+            r'(?:([0-9]{15,21})|(@me))'
+            r'/(?P<channel_id>[0-9]{15,21})/(?P<message_id>[0-9]{15,21})/?$'
+        )
+        match = id_regex.match(argument) or link_regex.match(argument)
+        if not match:
+            raise BadArgument('Message "{msg}" not found.'.format(msg=argument))
+        message_id = int(match.group("message_id"))
+        channel_id = match.group("channel_id")
+        message = ctx.bot._connection._get_message(message_id)
+        if message:
+            return message
+        channel = ctx.bot.get_channel(int(channel_id)) if channel_id else ctx.channel
+        if not channel:
+            raise BadArgument('Channel "{channel}" not found.'.format(channel=channel_id))
+        try:
+            return await channel.fetch_message(message_id)
+        except discord.NotFound:
+            raise BadArgument('Message "{msg}" not found.'.format(msg=argument))
+        except discord.Forbidden:
+            raise BadArgument("Can't read messages in {channel}".format(channel=channel.mention))
+
+class TextChannelConverter(IDConverter):
+    """Converts to a :class:`~discord.TextChannel`.
+
+    All lookups are via the local guild. If in a DM context, then the lookup
+    is done by the global cache.
+
+    The lookup strategy is as follows (in order):
+
+    1. Lookup by ID.
+    2. Lookup by mention.
+    3. Lookup by name
+    """
+    async def convert(self, ctx, argument):
+        bot = ctx.bot
+
+        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
+        result = None
+        guild = ctx.guild
+
+        if match is None:
+            # not a mention
+            if guild:
+                result = discord.utils.get(guild.text_channels, name=argument)
+            else:
+                def check(c):
+                    return isinstance(c, discord.TextChannel) and c.name == argument
+                result = discord.utils.find(check, bot.get_all_channels())
+        else:
+            channel_id = int(match.group(1))
+            if guild:
+                result = guild.get_channel(channel_id)
+            else:
+                result = _get_from_guilds(bot, 'get_channel', channel_id)
+
+        if not isinstance(result, discord.TextChannel):
+            raise BadArgument('Channel "{}" not found.'.format(argument))
+
+        return result
+
+class VoiceChannelConverter(IDConverter):
+    """Converts to a :class:`~discord.VoiceChannel`.
+
+    All lookups are via the local guild. If in a DM context, then the lookup
+    is done by the global cache.
+
+    The lookup strategy is as follows (in order):
+
+    1. Lookup by ID.
+    2. Lookup by mention.
+    3. Lookup by name
+    """
+    async def convert(self, ctx, argument):
+        bot = ctx.bot
+        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
+        result = None
+        guild = ctx.guild
+
+        if match is None:
+            # not a mention
+            if guild:
+                result = discord.utils.get(guild.voice_channels, name=argument)
+            else:
+                def check(c):
+                    return isinstance(c, discord.VoiceChannel) and c.name == argument
+                result = discord.utils.find(check, bot.get_all_channels())
+        else:
+            channel_id = int(match.group(1))
+            if guild:
+                result = guild.get_channel(channel_id)
+            else:
+                result = _get_from_guilds(bot, 'get_channel', channel_id)
+
+        if not isinstance(result, discord.VoiceChannel):
+            raise BadArgument('Channel "{}" not found.'.format(argument))
+
+        return result
+
+class CategoryChannelConverter(IDConverter):
+    """Converts to a :class:`~discord.CategoryChannel`.
+
+    All lookups are via the local guild. If in a DM context, then the lookup
+    is done by the global cache.
+
+    The lookup strategy is as follows (in order):
+
+    1. Lookup by ID.
+    2. Lookup by mention.
+    3. Lookup by name
Lookup by name + """ + async def convert(self, ctx, argument): + bot = ctx.bot + + match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument) + result = None + guild = ctx.guild + + if match is None: + # not a mention + if guild: + result = discord.utils.get(guild.categories, name=argument) + else: + def check(c): + return isinstance(c, discord.CategoryChannel) and c.name == argument + result = discord.utils.find(check, bot.get_all_channels()) + else: + channel_id = int(match.group(1)) + if guild: + result = guild.get_channel(channel_id) + else: + result = _get_from_guilds(bot, 'get_channel', channel_id) + + if not isinstance(result, discord.CategoryChannel): + raise BadArgument('Channel "{}" not found.'.format(argument)) + + return result + +class ColourConverter(Converter): + """Converts to a :class:`~discord.Colour`. + + The following formats are accepted: + + - ``0x`` + - ``#`` + - ``0x#`` + - Any of the ``classmethod`` in :class:`Colour` + + - The ``_`` in the name can be optionally replaced with spaces. + """ + async def convert(self, ctx, argument): + arg = argument.replace('0x', '').lower() + + if arg[0] == '#': + arg = arg[1:] + try: + value = int(arg, base=16) + if not (0 <= value <= 0xFFFFFF): + raise BadArgument('Colour "{}" is invalid.'.format(arg)) + return discord.Colour(value=value) + except ValueError: + arg = arg.replace(' ', '_') + method = getattr(discord.Colour, arg, None) + if arg.startswith('from_') or method is None or not inspect.ismethod(method): + raise BadArgument('Colour "{}" is invalid.'.format(arg)) + return method() + +class RoleConverter(IDConverter): + """Converts to a :class:`~discord.Role`. + + All lookups are via the local guild. If in a DM context, then the lookup + is done by the global cache. + + The lookup strategy is as follows (in order): + + 1. Lookup by ID. + 2. Lookup by mention. + 3. Lookup by name + """ + async def convert(self, ctx, argument): + guild = ctx.guild + if not guild: + raise NoPrivateMessage() + + match = self._get_id_match(argument) or re.match(r'<@&([0-9]+)>$', argument) + if match: + result = guild.get_role(int(match.group(1))) + else: + result = discord.utils.get(guild._roles.values(), name=argument) + + if result is None: + raise BadArgument('Role "{}" not found.'.format(argument)) + return result + +class GameConverter(Converter): + """Converts to :class:`~discord.Game`.""" + async def convert(self, ctx, argument): + return discord.Game(name=argument) + +class InviteConverter(Converter): + """Converts to a :class:`~discord.Invite`. + + This is done via an HTTP request using :meth:`.Bot.fetch_invite`. + """ + async def convert(self, ctx, argument): + try: + invite = await ctx.bot.fetch_invite(argument) + return invite + except Exception as exc: + raise BadArgument('Invite is invalid or expired') from exc + +class EmojiConverter(IDConverter): + """Converts to a :class:`~discord.Emoji`. + + All lookups are done for the local guild first, if available. If that lookup + fails, then it checks the client's global cache. + + The lookup strategy is as follows (in order): + + 1. Lookup by ID. + 2. Lookup by extracting ID from the emoji. + 3. Lookup by name + """ + async def convert(self, ctx, argument): + match = self._get_id_match(argument) or re.match(r'$', argument) + result = None + bot = ctx.bot + guild = ctx.guild + + if match is None: + # Try to get the emoji by name. Try local guild first. 
+ if guild: + result = discord.utils.get(guild.emojis, name=argument) + + if result is None: + result = discord.utils.get(bot.emojis, name=argument) + else: + emoji_id = int(match.group(1)) + + # Try to look up emoji by id. + if guild: + result = discord.utils.get(guild.emojis, id=emoji_id) + + if result is None: + result = discord.utils.get(bot.emojis, id=emoji_id) + + if result is None: + raise BadArgument('Emoji "{}" not found.'.format(argument)) + + return result + +class PartialEmojiConverter(Converter): + """Converts to a :class:`~discord.PartialEmoji`. + + This is done by extracting the animated flag, name and ID from the emoji. + """ + async def convert(self, ctx, argument): + match = re.match(r'<(a?):([a-zA-Z0-9\_]+):([0-9]+)>$', argument) + + if match: + emoji_animated = bool(match.group(1)) + emoji_name = match.group(2) + emoji_id = int(match.group(3)) + + return discord.PartialEmoji.with_state(ctx.bot._connection, animated=emoji_animated, name=emoji_name, + id=emoji_id) + + raise BadArgument('Couldn\'t convert "{}" to PartialEmoji.'.format(argument)) + +class clean_content(Converter): + """Converts the argument to mention scrubbed version of + said content. + + This behaves similarly to :attr:`~discord.Message.clean_content`. + + Attributes + ------------ + fix_channel_mentions: :class:`bool` + Whether to clean channel mentions. + use_nicknames: :class:`bool` + Whether to use nicknames when transforming mentions. + escape_markdown: :class:`bool` + Whether to also escape special markdown characters. + """ + def __init__(self, *, fix_channel_mentions=False, use_nicknames=True, escape_markdown=False): + self.fix_channel_mentions = fix_channel_mentions + self.use_nicknames = use_nicknames + self.escape_markdown = escape_markdown + + async def convert(self, ctx, argument): + message = ctx.message + transformations = {} + + if self.fix_channel_mentions and ctx.guild: + def resolve_channel(id, *, _get=ctx.guild.get_channel): + ch = _get(id) + return ('<#%s>' % id), ('#' + ch.name if ch else '#deleted-channel') + + transformations.update(resolve_channel(channel) for channel in message.raw_channel_mentions) + + if self.use_nicknames and ctx.guild: + def resolve_member(id, *, _get=ctx.guild.get_member): + m = _get(id) + return '@' + m.display_name if m else '@deleted-user' + else: + def resolve_member(id, *, _get=ctx.bot.get_user): + m = _get(id) + return '@' + m.name if m else '@deleted-user' + + + transformations.update( + ('<@%s>' % member_id, resolve_member(member_id)) + for member_id in message.raw_mentions + ) + + transformations.update( + ('<@!%s>' % member_id, resolve_member(member_id)) + for member_id in message.raw_mentions + ) + + if ctx.guild: + def resolve_role(_id, *, _find=ctx.guild.get_role): + r = _find(_id) + return '@' + r.name if r else '@deleted-role' + + transformations.update( + ('<@&%s>' % role_id, resolve_role(role_id)) + for role_id in message.raw_role_mentions + ) + + def repl(obj): + return transformations.get(obj.group(0), '') + + pattern = re.compile('|'.join(transformations.keys())) + result = pattern.sub(repl, argument) + + if self.escape_markdown: + result = discord.utils.escape_markdown(result) + + # Completely ensure no mentions escape: + return discord.utils.escape_mentions(result) + +class _Greedy: + __slots__ = ('converter',) + + def __init__(self, *, converter=None): + self.converter = converter + + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if len(params) != 1: + raise TypeError('Greedy[...] 
only takes a single argument') + converter = params[0] + + if not (callable(converter) or isinstance(converter, Converter) or hasattr(converter, '__origin__')): + raise TypeError('Greedy[...] expects a type or a Converter instance.') + + if converter is str or converter is type(None) or converter is _Greedy: + raise TypeError('Greedy[%s] is invalid.' % converter.__name__) + + return self.__class__(converter=converter) + +Greedy = _Greedy() diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/cooldowns.py b/venv/lib/python3.7/site-packages/discord/ext/commands/cooldowns.py new file mode 100644 index 0000000..5cd305f --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/cooldowns.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +from discord.enums import Enum +import time + +__all__ = ( + 'BucketType', + 'Cooldown', + 'CooldownMapping', +) + +class BucketType(Enum): + default = 0 + user = 1 + guild = 2 + channel = 3 + member = 4 + category = 5 + +class Cooldown: + __slots__ = ('rate', 'per', 'type', '_window', '_tokens', '_last') + + def __init__(self, rate, per, type): + self.rate = int(rate) + self.per = float(per) + self.type = type + self._window = 0.0 + self._tokens = self.rate + self._last = 0.0 + + if not isinstance(self.type, BucketType): + raise TypeError('Cooldown type must be a BucketType') + + def get_tokens(self, current=None): + if not current: + current = time.time() + + tokens = self._tokens + + if current > self._window + self.per: + tokens = self.rate + return tokens + + def update_rate_limit(self, current=None): + current = current or time.time() + self._last = current + + self._tokens = self.get_tokens(current) + + # first token used means that we start a new rate limit window + if self._tokens == self.rate: + self._window = current + + # check if we are rate limited + if self._tokens == 0: + return self.per - (current - self._window) + + # we're not so decrement our tokens + self._tokens -= 1 + + # see if we got rate limited due to this token change, and if + # so update the window to point to our current time frame + if self._tokens == 0: + self._window = current + + def reset(self): + self._tokens = self.rate + self._last = 0.0 + + def copy(self): + return Cooldown(self.rate, self.per, self.type) + + def __repr__(self): + return ''.format(self) + +class CooldownMapping: + def __init__(self, original): + self._cache = {} + self._cooldown = original + + def copy(self): + ret = CooldownMapping(self._cooldown) + ret._cache = self._cache.copy() + return ret + + @property + def valid(self): + return self._cooldown is not None + + @classmethod + def from_cooldown(cls, rate, per, type): + return cls(Cooldown(rate, per, type)) + + def _bucket_key(self, msg): + bucket_type = self._cooldown.type + if bucket_type is BucketType.user: + return msg.author.id + elif bucket_type is BucketType.guild: + return (msg.guild or msg.author).id + elif bucket_type is BucketType.channel: + return msg.channel.id + elif bucket_type is BucketType.member: + return ((msg.guild and msg.guild.id), msg.author.id) + elif bucket_type is BucketType.category: + return (msg.channel.category or msg.channel).id + + def _verify_cache_integrity(self, current=None): + # we want to delete all cache objects that haven't been used + # in a cooldown window. e.g. 
if we have a command that has a + # cooldown of 60s and it has not been used in 60s then that key should be deleted + current = current or time.time() + dead_keys = [k for k, v in self._cache.items() if current > v._last + v.per] + for k in dead_keys: + del self._cache[k] + + def get_bucket(self, message, current=None): + if self._cooldown.type is BucketType.default: + return self._cooldown + + self._verify_cache_integrity(current) + key = self._bucket_key(message) + if key not in self._cache: + bucket = self._cooldown.copy() + self._cache[key] = bucket + else: + bucket = self._cache[key] + + return bucket + + def update_rate_limit(self, message, current=None): + bucket = self.get_bucket(message, current) + return bucket.update_rate_limit(current) diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/core.py b/venv/lib/python3.7/site-packages/discord/ext/commands/core.py new file mode 100644 index 0000000..ead83c7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/core.py @@ -0,0 +1,1627 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import functools +import inspect +import typing +import datetime + +import discord + +from .errors import * +from .cooldowns import Cooldown, BucketType, CooldownMapping +from . 
import converter as converters +from ._types import _BaseCommand +from .cog import Cog + +__all__ = ( + 'Command', + 'Group', + 'GroupMixin', + 'command', + 'group', + 'has_role', + 'has_permissions', + 'has_any_role', + 'check', + 'bot_has_role', + 'bot_has_permissions', + 'bot_has_any_role', + 'cooldown', + 'dm_only', + 'guild_only', + 'is_owner', + 'is_nsfw', +) + +def wrap_callback(coro): + @functools.wraps(coro) + async def wrapped(*args, **kwargs): + try: + ret = await coro(*args, **kwargs) + except CommandError: + raise + except asyncio.CancelledError: + return + except Exception as exc: + raise CommandInvokeError(exc) from exc + return ret + return wrapped + +def hooked_wrapped_callback(command, ctx, coro): + @functools.wraps(coro) + async def wrapped(*args, **kwargs): + try: + ret = await coro(*args, **kwargs) + except CommandError: + ctx.command_failed = True + raise + except asyncio.CancelledError: + ctx.command_failed = True + return + except Exception as exc: + ctx.command_failed = True + raise CommandInvokeError(exc) from exc + finally: + await command.call_after_hooks(ctx) + return ret + return wrapped + +def _convert_to_bool(argument): + lowered = argument.lower() + if lowered in ('yes', 'y', 'true', 't', '1', 'enable', 'on'): + return True + elif lowered in ('no', 'n', 'false', 'f', '0', 'disable', 'off'): + return False + else: + raise BadArgument(lowered + ' is not a recognised boolean option') + +class _CaseInsensitiveDict(dict): + def __contains__(self, k): + return super().__contains__(k.lower()) + + def __delitem__(self, k): + return super().__delitem__(k.lower()) + + def __getitem__(self, k): + return super().__getitem__(k.lower()) + + def get(self, k, default=None): + return super().get(k.lower(), default) + + def pop(self, k, default=None): + return super().pop(k.lower(), default) + + def __setitem__(self, k, v): + super().__setitem__(k.lower(), v) + +class Command(_BaseCommand): + r"""A class that implements the protocol for a bot text command. + + These are not created manually, instead they are created via the + decorator or functional interface. + + Attributes + ----------- + name: :class:`str` + The name of the command. + callback: :ref:`coroutine ` + The coroutine that is executed when the command is called. + help: :class:`str` + The long help text for the command. + brief: :class:`str` + The short help text for the command. If this is not specified + then the first line of the long help text is used instead. + usage: :class:`str` + A replacement for arguments in the default help text. + aliases: :class:`list` + The list of aliases the command can be invoked under. + enabled: :class:`bool` + A boolean that indicates if the command is currently enabled. + If the command is invoked while it is disabled, then + :exc:`.DisabledCommand` is raised to the :func:`.on_command_error` + event. Defaults to ``True``. + parent: Optional[:class:`Command`] + The parent command that this command belongs to. ``None`` if there + isn't one. + checks: List[Callable[..., :class:`bool`]] + A list of predicates that verifies if the command could be executed + with the given :class:`.Context` as the sole parameter. If an exception + is necessary to be thrown to signal failure, then one inherited from + :exc:`.CommandError` should be used. Note that if the checks fail then + :exc:`.CheckFailure` exception is raised to the :func:`.on_command_error` + event. + description: :class:`str` + The message prefixed into the default help command. 
+ hidden: :class:`bool` + If ``True``\, the default help command does not show this in the + help output. + rest_is_raw: :class:`bool` + If ``False`` and a keyword-only argument is provided then the keyword + only argument is stripped and handled as if it was a regular argument + that handles :exc:`.MissingRequiredArgument` and default values in a + regular matter rather than passing the rest completely raw. If ``True`` + then the keyword-only argument will pass in the rest of the arguments + in a completely raw matter. Defaults to ``False``. + invoked_subcommand: Optional[:class:`Command`] + The subcommand that was invoked, if any. + ignore_extra: :class:`bool` + If ``True``\, ignores extraneous strings passed to a command if all its + requirements are met (e.g. ``?foo a b c`` when only expecting ``a`` + and ``b``). Otherwise :func:`.on_command_error` and local error handlers + are called with :exc:`.TooManyArguments`. Defaults to ``True``. + cooldown_after_parsing: :class:`bool` + If ``True``\, cooldown processing is done after argument parsing, + which calls converters. If ``False`` then cooldown processing is done + first and then the converters are called second. Defaults to ``False``. + """ + + def __new__(cls, *args, **kwargs): + # if you're wondering why this is done, it's because we need to ensure + # we have a complete original copy of **kwargs even for classes that + # mess with it by popping before delegating to the subclass __init__. + # In order to do this, we need to control the instance creation and + # inject the original kwargs through __new__ rather than doing it + # inside __init__. + self = super().__new__(cls) + + # we do a shallow copy because it's probably the most common use case. + # this could potentially break if someone modifies a list or something + # while it's in movement, but for now this is the cheapest and + # fastest way to do what we want. 
+ self.__original_kwargs__ = kwargs.copy() + return self + + def __init__(self, func, **kwargs): + if not asyncio.iscoroutinefunction(func): + raise TypeError('Callback must be a coroutine.') + + self.name = name = kwargs.get('name') or func.__name__ + if not isinstance(name, str): + raise TypeError('Name of a command must be a string.') + + self.callback = func + self.enabled = kwargs.get('enabled', True) + + help_doc = kwargs.get('help') + if help_doc is not None: + help_doc = inspect.cleandoc(help_doc) + else: + help_doc = inspect.getdoc(func) + if isinstance(help_doc, bytes): + help_doc = help_doc.decode('utf-8') + + self.help = help_doc + + self.brief = kwargs.get('brief') + self.usage = kwargs.get('usage') + self.rest_is_raw = kwargs.get('rest_is_raw', False) + self.aliases = kwargs.get('aliases', []) + + if not isinstance(self.aliases, (list, tuple)): + raise TypeError("Aliases of a command must be a list of strings.") + + self.description = inspect.cleandoc(kwargs.get('description', '')) + self.hidden = kwargs.get('hidden', False) + + try: + checks = func.__commands_checks__ + checks.reverse() + except AttributeError: + checks = kwargs.get('checks', []) + finally: + self.checks = checks + + try: + cooldown = func.__commands_cooldown__ + except AttributeError: + cooldown = kwargs.get('cooldown') + finally: + self._buckets = CooldownMapping(cooldown) + + self.ignore_extra = kwargs.get('ignore_extra', True) + self.cooldown_after_parsing = kwargs.get('cooldown_after_parsing', False) + self.cog = None + + # bandaid for the fact that sometimes parent can be the bot instance + parent = kwargs.get('parent') + self.parent = parent if isinstance(parent, _BaseCommand) else None + self._before_invoke = None + self._after_invoke = None + + @property + def callback(self): + return self._callback + + @callback.setter + def callback(self, function): + self._callback = function + self.module = function.__module__ + + signature = inspect.signature(function) + self.params = signature.parameters.copy() + + # PEP-563 allows postponing evaluation of annotations with a __future__ + # import. When postponed, Parameter.annotation will be a string and must + # be replaced with the real value for the converters to work later on + for key, value in self.params.items(): + if isinstance(value.annotation, str): + self.params[key] = value = value.replace(annotation=eval(value.annotation, function.__globals__)) + + # fail early for when someone passes an unparameterized Greedy type + if value.annotation is converters.Greedy: + raise TypeError('Unparameterized Greedy[...] is disallowed in signature.') + + def update(self, **kwargs): + """Updates :class:`Command` instance with updated attribute. + + This works similarly to the :func:`.command` decorator in terms + of parameters in that they are passed to the :class:`Command` or + subclass constructors, sans the name and callback. 
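+
+        Example
+        --------
+
+        A minimal, illustrative sketch (the ``ping`` command and the
+        ``bot`` instance below are made up for demonstration):
+
+        .. code-block:: python3
+
+            from discord.ext import commands
+
+            bot = commands.Bot(command_prefix='?')
+
+            @bot.command()
+            async def ping(ctx):
+                await ctx.send('pong')
+
+            # Tweak attributes later without redefining the callback.
+            ping.update(hidden=True, cooldown_after_parsing=True)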
+ """ + self.__init__(self.callback, **dict(self.__original_kwargs__, **kwargs)) + + def _ensure_assignment_on_copy(self, other): + other._before_invoke = self._before_invoke + other._after_invoke = self._after_invoke + if self.checks != other.checks: + other.checks = self.checks.copy() + if self._buckets.valid and not other._buckets.valid: + other._buckets = self._buckets.copy() + try: + other.on_error = self.on_error + except AttributeError: + pass + return other + + def copy(self): + """Creates a copy of this command.""" + ret = self.__class__(self.callback, **self.__original_kwargs__) + return self._ensure_assignment_on_copy(ret) + + def _update_copy(self, kwargs): + if kwargs: + kw = kwargs.copy() + kw.update(self.__original_kwargs__) + copy = self.__class__(self.callback, **kw) + return self._ensure_assignment_on_copy(copy) + else: + return self.copy() + + async def dispatch_error(self, ctx, error): + ctx.command_failed = True + cog = self.cog + try: + coro = self.on_error + except AttributeError: + pass + else: + injected = wrap_callback(coro) + if cog is not None: + await injected(cog, ctx, error) + else: + await injected(ctx, error) + + try: + if cog is not None: + local = Cog._get_overridden_method(cog.cog_command_error) + if local is not None: + wrapped = wrap_callback(local) + await wrapped(ctx, error) + finally: + ctx.bot.dispatch('command_error', ctx, error) + + async def _actual_conversion(self, ctx, converter, argument, param): + if converter is bool: + return _convert_to_bool(argument) + + try: + module = converter.__module__ + except AttributeError: + pass + else: + if module is not None and (module.startswith('discord.') and not module.endswith('converter')): + converter = getattr(converters, converter.__name__ + 'Converter') + + try: + if inspect.isclass(converter): + if issubclass(converter, converters.Converter): + instance = converter() + ret = await instance.convert(ctx, argument) + return ret + else: + method = getattr(converter, 'convert', None) + if method is not None and inspect.ismethod(method): + ret = await method(ctx, argument) + return ret + elif isinstance(converter, converters.Converter): + ret = await converter.convert(ctx, argument) + return ret + except CommandError: + raise + except Exception as exc: + raise ConversionError(converter, exc) from exc + + try: + return converter(argument) + except CommandError: + raise + except Exception as exc: + try: + name = converter.__name__ + except AttributeError: + name = converter.__class__.__name__ + + raise BadArgument('Converting to "{}" failed for parameter "{}".'.format(name, param.name)) from exc + + async def do_conversion(self, ctx, converter, argument, param): + try: + origin = converter.__origin__ + except AttributeError: + pass + else: + if origin is typing.Union: + errors = [] + _NoneType = type(None) + for conv in converter.__args__: + # if we got to this part in the code, then the previous conversions have failed + # so we should just undo the view, return the default, and allow parsing to continue + # with the other parameters + if conv is _NoneType and param.kind != param.VAR_POSITIONAL: + ctx.view.undo() + return None if param.default is param.empty else param.default + + try: + value = await self._actual_conversion(ctx, conv, argument, param) + except CommandError as exc: + errors.append(exc) + else: + return value + + # if we're here, then we failed all the converters + raise BadUnionArgument(param, converter.__args__, errors) + + return await self._actual_conversion(ctx, converter, argument, 
param) + + def _get_converter(self, param): + converter = param.annotation + if converter is param.empty: + if param.default is not param.empty: + converter = str if param.default is None else type(param.default) + else: + converter = str + return converter + + async def transform(self, ctx, param): + required = param.default is param.empty + converter = self._get_converter(param) + consume_rest_is_special = param.kind == param.KEYWORD_ONLY and not self.rest_is_raw + view = ctx.view + view.skip_ws() + + # The greedy converter is simple -- it keeps going until it fails in which case, + # it undos the view ready for the next parameter to use instead + if type(converter) is converters._Greedy: + if param.kind == param.POSITIONAL_OR_KEYWORD: + return await self._transform_greedy_pos(ctx, param, required, converter.converter) + elif param.kind == param.VAR_POSITIONAL: + return await self._transform_greedy_var_pos(ctx, param, converter.converter) + else: + # if we're here, then it's a KEYWORD_ONLY param type + # since this is mostly useless, we'll helpfully transform Greedy[X] + # into just X and do the parsing that way. + converter = converter.converter + + if view.eof: + if param.kind == param.VAR_POSITIONAL: + raise RuntimeError() # break the loop + if required: + if self._is_typing_optional(param.annotation): + return None + raise MissingRequiredArgument(param) + return param.default + + previous = view.index + if consume_rest_is_special: + argument = view.read_rest().strip() + else: + argument = view.get_quoted_word() + view.previous = previous + + return await self.do_conversion(ctx, converter, argument, param) + + async def _transform_greedy_pos(self, ctx, param, required, converter): + view = ctx.view + result = [] + while not view.eof: + # for use with a manual undo + previous = view.index + + view.skip_ws() + argument = view.get_quoted_word() + try: + value = await self.do_conversion(ctx, converter, argument, param) + except CommandError: + view.index = previous + break + else: + result.append(value) + + if not result and not required: + return param.default + return result + + async def _transform_greedy_var_pos(self, ctx, param, converter): + view = ctx.view + previous = view.index + argument = view.get_quoted_word() + try: + value = await self.do_conversion(ctx, converter, argument, param) + except CommandError: + view.index = previous + raise RuntimeError() from None # break loop + else: + return value + + @property + def clean_params(self): + """Retrieves the parameter OrderedDict without the context or self parameters. + + Useful for inspecting signature. + """ + result = self.params.copy() + if self.cog is not None: + # first parameter is self + result.popitem(last=False) + + try: + # first/second parameter is context + result.popitem(last=False) + except Exception: + raise ValueError('Missing context parameter') from None + + return result + + @property + def full_parent_name(self): + """:class:`str`: Retrieves the fully qualified parent command name. + + This the base command name required to execute it. For example, + in ``?one two three`` the parent name would be ``one two``. + """ + entries = [] + command = self + while command.parent is not None: + command = command.parent + entries.append(command.name) + + return ' '.join(reversed(entries)) + + @property + def parents(self): + """:class:`Command`: Retrieves the parents of this command. + + If the command has no parents then it returns an empty :class:`list`. 
+ + For example in commands ``?a b c test``, the parents are ``[c, b, a]``. + + .. versionadded:: 1.1.0 + """ + entries = [] + command = self + while command.parent is not None: + command = command.parent + entries.append(command) + + return entries + + @property + def root_parent(self): + """Retrieves the root parent of this command. + + If the command has no parents then it returns ``None``. + + For example in commands ``?a b c test``, the root parent is ``a``. + """ + if not self.parent: + return None + return self.parents[-1] + + @property + def qualified_name(self): + """:class:`str`: Retrieves the fully qualified command name. + + This is the full parent name with the command name as well. + For example, in ``?one two three`` the qualified name would be + ``one two three``. + """ + + parent = self.full_parent_name + if parent: + return parent + ' ' + self.name + else: + return self.name + + def __str__(self): + return self.qualified_name + + async def _parse_arguments(self, ctx): + ctx.args = [ctx] if self.cog is None else [self.cog, ctx] + ctx.kwargs = {} + args = ctx.args + kwargs = ctx.kwargs + + view = ctx.view + iterator = iter(self.params.items()) + + if self.cog is not None: + # we have 'self' as the first parameter so just advance + # the iterator and resume parsing + try: + next(iterator) + except StopIteration: + fmt = 'Callback for {0.name} command is missing "self" parameter.' + raise discord.ClientException(fmt.format(self)) + + # next we have the 'ctx' as the next parameter + try: + next(iterator) + except StopIteration: + fmt = 'Callback for {0.name} command is missing "ctx" parameter.' + raise discord.ClientException(fmt.format(self)) + + for name, param in iterator: + if param.kind == param.POSITIONAL_OR_KEYWORD: + transformed = await self.transform(ctx, param) + args.append(transformed) + elif param.kind == param.KEYWORD_ONLY: + # kwarg only param denotes "consume rest" semantics + if self.rest_is_raw: + converter = self._get_converter(param) + argument = view.read_rest() + kwargs[name] = await self.do_conversion(ctx, converter, argument, param) + else: + kwargs[name] = await self.transform(ctx, param) + break + elif param.kind == param.VAR_POSITIONAL: + while not view.eof: + try: + transformed = await self.transform(ctx, param) + args.append(transformed) + except RuntimeError: + break + + if not self.ignore_extra: + if not view.eof: + raise TooManyArguments('Too many arguments passed to ' + self.qualified_name) + + async def _verify_checks(self, ctx): + if not self.enabled: + raise DisabledCommand('{0.name} command is disabled'.format(self)) + + if not await self.can_run(ctx): + raise CheckFailure('The check functions for command {0.qualified_name} failed.'.format(self)) + + async def call_before_hooks(self, ctx): + # now that we're done preparing we can call the pre-command hooks + # first, call the command local hook: + cog = self.cog + if self._before_invoke is not None: + if cog is None: + await self._before_invoke(ctx) + else: + await self._before_invoke(cog, ctx) + + # call the cog local hook if applicable: + if cog is not None: + hook = Cog._get_overridden_method(cog.cog_before_invoke) + if hook is not None: + await hook(ctx) + + # call the bot global hook if necessary + hook = ctx.bot._before_invoke + if hook is not None: + await hook(ctx) + + async def call_after_hooks(self, ctx): + cog = self.cog + if self._after_invoke is not None: + if cog is None: + await self._after_invoke(ctx) + else: + await self._after_invoke(cog, ctx) + + # call the cog local hook 
if applicable: + if cog is not None: + hook = Cog._get_overridden_method(cog.cog_after_invoke) + if hook is not None: + await hook(ctx) + + hook = ctx.bot._after_invoke + if hook is not None: + await hook(ctx) + + def _prepare_cooldowns(self, ctx): + if self._buckets.valid: + current = ctx.message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp() + bucket = self._buckets.get_bucket(ctx.message, current) + retry_after = bucket.update_rate_limit(current) + if retry_after: + raise CommandOnCooldown(bucket, retry_after) + + async def prepare(self, ctx): + ctx.command = self + await self._verify_checks(ctx) + + if self.cooldown_after_parsing: + await self._parse_arguments(ctx) + self._prepare_cooldowns(ctx) + else: + self._prepare_cooldowns(ctx) + await self._parse_arguments(ctx) + + await self.call_before_hooks(ctx) + + def is_on_cooldown(self, ctx): + """Checks whether the command is currently on cooldown. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context to use when checking the commands cooldown status. + + Returns + -------- + :class:`bool` + A boolean indicating if the command is on cooldown. + """ + if not self._buckets.valid: + return False + + bucket = self._buckets.get_bucket(ctx.message) + return bucket.get_tokens() == 0 + + def reset_cooldown(self, ctx): + """Resets the cooldown on this command. + + Parameters + ----------- + ctx: :class:`.Context` + The invocation context to reset the cooldown under. + """ + if self._buckets.valid: + bucket = self._buckets.get_bucket(ctx.message) + bucket.reset() + + async def invoke(self, ctx): + await self.prepare(ctx) + + # terminate the invoked_subcommand chain. + # since we're in a regular command (and not a group) then + # the invoked subcommand is None. + ctx.invoked_subcommand = None + injected = hooked_wrapped_callback(self, ctx, self.callback) + await injected(*ctx.args, **ctx.kwargs) + + async def reinvoke(self, ctx, *, call_hooks=False): + ctx.command = self + await self._parse_arguments(ctx) + + if call_hooks: + await self.call_before_hooks(ctx) + + ctx.invoked_subcommand = None + try: + await self.callback(*ctx.args, **ctx.kwargs) + except: + ctx.command_failed = True + raise + finally: + if call_hooks: + await self.call_after_hooks(ctx) + + def error(self, coro): + """A decorator that registers a coroutine as a local error handler. + + A local error handler is an :func:`.on_command_error` event limited to + a single command. However, the :func:`.on_command_error` is still + invoked afterwards as the catch-all. + + Parameters + ----------- + coro: :ref:`coroutine ` + The coroutine to register as the local error handler. + + Raises + ------- + TypeError + The coroutine passed is not actually a coroutine. + """ + + if not asyncio.iscoroutinefunction(coro): + raise TypeError('The error handler must be a coroutine.') + + self.on_error = coro + return coro + + def before_invoke(self, coro): + """A decorator that registers a coroutine as a pre-invoke hook. + + A pre-invoke hook is called directly before the command is + called. This makes it a useful function to set up database + connections or any type of set up required. + + This pre-invoke hook takes a sole parameter, a :class:`.Context`. + + See :meth:`.Bot.before_invoke` for more info. + + Parameters + ----------- + coro: :ref:`coroutine ` + The coroutine to register as the pre-invoke hook. + + Raises + ------- + TypeError + The coroutine passed is not actually a coroutine. 
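+
+        Example
+        --------
+
+        An illustrative sketch (``report`` and ``open_session`` are
+        hypothetical names, not part of the library):
+
+        .. code-block:: python3
+
+            from discord.ext import commands
+
+            bot = commands.Bot(command_prefix='?')
+
+            @bot.command()
+            async def report(ctx):
+                await ctx.send('report generated')
+
+            @report.before_invoke
+            async def open_session(ctx):
+                # runs right before the report callback, e.g. to
+                # acquire a database connection for this invocation
+                ctx.session_ready = True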
+ """ + if not asyncio.iscoroutinefunction(coro): + raise TypeError('The pre-invoke hook must be a coroutine.') + + self._before_invoke = coro + return coro + + def after_invoke(self, coro): + """A decorator that registers a coroutine as a post-invoke hook. + + A post-invoke hook is called directly after the command is + called. This makes it a useful function to clean-up database + connections or any type of clean up required. + + This post-invoke hook takes a sole parameter, a :class:`.Context`. + + See :meth:`.Bot.after_invoke` for more info. + + Parameters + ----------- + coro: :ref:`coroutine ` + The coroutine to register as the post-invoke hook. + + Raises + ------- + TypeError + The coroutine passed is not actually a coroutine. + """ + if not asyncio.iscoroutinefunction(coro): + raise TypeError('The post-invoke hook must be a coroutine.') + + self._after_invoke = coro + return coro + + @property + def cog_name(self): + """:class:`str`: The name of the cog this command belongs to. None otherwise.""" + return type(self.cog).__cog_name__ if self.cog is not None else None + + @property + def short_doc(self): + """:class:`str`: Gets the "short" documentation of a command. + + By default, this is the :attr:`brief` attribute. + If that lookup leads to an empty string then the first line of the + :attr:`help` attribute is used instead. + """ + if self.brief is not None: + return self.brief + if self.help is not None: + return self.help.split('\n', 1)[0] + return '' + + def _is_typing_optional(self, annotation): + try: + origin = annotation.__origin__ + except AttributeError: + return False + + if origin is not typing.Union: + return False + + return annotation.__args__[-1] is type(None) + + @property + def signature(self): + """:class:`str`: Returns a POSIX-like signature useful for help command output.""" + if self.usage is not None: + return self.usage + + + params = self.clean_params + if not params: + return '' + + result = [] + for name, param in params.items(): + greedy = isinstance(param.annotation, converters._Greedy) + + if param.default is not param.empty: + # We don't want None or '' to trigger the [name=value] case and instead it should + # do [name] since [name=None] or [name=] are not exactly useful for the user. + should_print = param.default if isinstance(param.default, str) else param.default is not None + if should_print: + result.append('[%s=%s]' % (name, param.default) if not greedy else + '[%s=%s]...' % (name, param.default)) + continue + else: + result.append('[%s]' % name) + + elif param.kind == param.VAR_POSITIONAL: + result.append('[%s...]' % name) + elif greedy: + result.append('[%s]...' % name) + elif self._is_typing_optional(param.annotation): + result.append('[%s]' % name) + else: + result.append('<%s>' % name) + + return ' '.join(result) + + async def can_run(self, ctx): + """|coro| + + Checks if the command can be executed by checking all the predicates + inside the :attr:`.checks` attribute. + + Parameters + ----------- + ctx: :class:`.Context` + The ctx of the command currently being invoked. + + Raises + ------- + :class:`CommandError` + Any command error that was raised during a check call will be propagated + by this function. + + Returns + -------- + :class:`bool` + A boolean indicating if the command can be invoked. 
+ """ + + original = ctx.command + ctx.command = self + + try: + if not await ctx.bot.can_run(ctx): + raise CheckFailure('The global check functions for command {0.qualified_name} failed.'.format(self)) + + cog = self.cog + if cog is not None: + local_check = Cog._get_overridden_method(cog.cog_check) + if local_check is not None: + ret = await discord.utils.maybe_coroutine(local_check, ctx) + if not ret: + return False + + predicates = self.checks + if not predicates: + # since we have no checks, then we just return True. + return True + + return await discord.utils.async_all(predicate(ctx) for predicate in predicates) + finally: + ctx.command = original + +class GroupMixin: + """A mixin that implements common functionality for classes that behave + similar to :class:`.Group` and are allowed to register commands. + + Attributes + ----------- + all_commands: :class:`dict` + A mapping of command name to :class:`.Command` or subclass + objects. + case_insensitive: :class:`bool` + Whether the commands should be case insensitive. Defaults to ``False``. + """ + def __init__(self, *args, **kwargs): + case_insensitive = kwargs.get('case_insensitive', False) + self.all_commands = _CaseInsensitiveDict() if case_insensitive else {} + self.case_insensitive = case_insensitive + super().__init__(*args, **kwargs) + + @property + def commands(self): + """Set[:class:`.Command`]: A unique set of commands without aliases that are registered.""" + return set(self.all_commands.values()) + + def recursively_remove_all_commands(self): + for command in self.all_commands.copy().values(): + if isinstance(command, GroupMixin): + command.recursively_remove_all_commands() + self.remove_command(command.name) + + def add_command(self, command): + """Adds a :class:`.Command` or its subclasses into the internal list + of commands. + + This is usually not called, instead the :meth:`~.GroupMixin.command` or + :meth:`~.GroupMixin.group` shortcut decorators are used instead. + + Parameters + ----------- + command: :class:`Command` + The command to add. + + Raises + ------- + :exc:`.ClientException` + If the command is already registered. + TypeError + If the command passed is not a subclass of :class:`.Command`. + """ + + if not isinstance(command, Command): + raise TypeError('The command passed must be a subclass of Command') + + if isinstance(self, Command): + command.parent = self + + if command.name in self.all_commands: + raise discord.ClientException('Command {0.name} is already registered.'.format(command)) + + self.all_commands[command.name] = command + for alias in command.aliases: + if alias in self.all_commands: + raise discord.ClientException('The alias {} is already an existing command or alias.'.format(alias)) + self.all_commands[alias] = command + + def remove_command(self, name): + """Remove a :class:`.Command` or subclasses from the internal list + of commands. + + This could also be used as a way to remove aliases. + + Parameters + ----------- + name: :class:`str` + The name of the command to remove. + + Returns + -------- + :class:`.Command` or subclass + The command that was removed. If the name is not valid then + `None` is returned instead. + """ + command = self.all_commands.pop(name, None) + + # does not exist + if command is None: + return None + + if name in command.aliases: + # we're removing an alias so we don't want to remove the rest + return command + + # we're not removing the alias so let's delete the rest of them. 
+ for alias in command.aliases: + self.all_commands.pop(alias, None) + return command + + def walk_commands(self): + """An iterator that recursively walks through all commands and subcommands.""" + for command in tuple(self.all_commands.values()): + yield command + if isinstance(command, GroupMixin): + yield from command.walk_commands() + + def get_command(self, name): + """Get a :class:`.Command` or subclasses from the internal list + of commands. + + This could also be used as a way to get aliases. + + The name could be fully qualified (e.g. ``'foo bar'``) will get + the subcommand ``bar`` of the group command ``foo``. If a + subcommand is not found then ``None`` is returned just as usual. + + Parameters + ----------- + name: :class:`str` + The name of the command to get. + + Returns + -------- + :class:`Command` or subclass + The command that was requested. If not found, returns ``None``. + """ + + # fast path, no space in name. + if ' ' not in name: + return self.all_commands.get(name) + + names = name.split() + obj = self.all_commands.get(names[0]) + if not isinstance(obj, GroupMixin): + return obj + + for name in names[1:]: + try: + obj = obj.all_commands[name] + except (AttributeError, KeyError): + return None + + return obj + + def command(self, *args, **kwargs): + """A shortcut decorator that invokes :func:`.command` and adds it to + the internal command list via :meth:`~.GroupMixin.add_command`. + """ + def decorator(func): + kwargs.setdefault('parent', self) + result = command(*args, **kwargs)(func) + self.add_command(result) + return result + + return decorator + + def group(self, *args, **kwargs): + """A shortcut decorator that invokes :func:`.group` and adds it to + the internal command list via :meth:`~.GroupMixin.add_command`. + """ + def decorator(func): + kwargs.setdefault('parent', self) + result = group(*args, **kwargs)(func) + self.add_command(result) + return result + + return decorator + +class Group(GroupMixin, Command): + """A class that implements a grouping protocol for commands to be + executed as subcommands. + + This class is a subclass of :class:`.Command` and thus all options + valid in :class:`.Command` are valid in here as well. + + Attributes + ----------- + invoke_without_command: Optional[:class:`bool`] + Indicates if the group callback should begin parsing and + invocation only if no subcommand was found. Useful for + making it an error handling function to tell the user that + no subcommand was found or to have different functionality + in case no subcommand was found. If this is ``False``, then + the group callback will always be invoked first. This means + that the checks and the parsing dictated by its parameters + will be executed. Defaults to ``False``. + case_insensitive: Optional[:class:`bool`] + Indicates if the group's commands should be case insensitive. + Defaults to ``False``. 
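+
+    Example
+    --------
+
+    An illustrative sketch of a group with a subcommand (the ``tag`` group
+    below is made up):
+
+    .. code-block:: python3
+
+        from discord.ext import commands
+
+        bot = commands.Bot(command_prefix='?')
+
+        @bot.group(invoke_without_command=True)
+        async def tag(ctx):
+            # only runs when no subcommand matched, because
+            # invoke_without_command is True
+            await ctx.send('Usage: ?tag show <name>')
+
+        @tag.command()
+        async def show(ctx, name):
+            await ctx.send('showing tag: ' + name)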
+ """ + def __init__(self, *args, **attrs): + self.invoke_without_command = attrs.pop('invoke_without_command', False) + super().__init__(*args, **attrs) + + def copy(self): + """Creates a copy of this :class:`Group`.""" + ret = super().copy() + for cmd in self.commands: + ret.add_command(cmd.copy()) + return ret + + async def invoke(self, ctx): + ctx.invoked_subcommand = None + early_invoke = not self.invoke_without_command + if early_invoke: + await self.prepare(ctx) + + view = ctx.view + previous = view.index + view.skip_ws() + trigger = view.get_word() + + if trigger: + ctx.subcommand_passed = trigger + ctx.invoked_subcommand = self.all_commands.get(trigger, None) + + if early_invoke: + injected = hooked_wrapped_callback(self, ctx, self.callback) + await injected(*ctx.args, **ctx.kwargs) + + if trigger and ctx.invoked_subcommand: + ctx.invoked_with = trigger + await ctx.invoked_subcommand.invoke(ctx) + elif not early_invoke: + # undo the trigger parsing + view.index = previous + view.previous = previous + await super().invoke(ctx) + + async def reinvoke(self, ctx, *, call_hooks=False): + ctx.invoked_subcommand = None + early_invoke = not self.invoke_without_command + if early_invoke: + ctx.command = self + await self._parse_arguments(ctx) + + if call_hooks: + await self.call_before_hooks(ctx) + + view = ctx.view + previous = view.index + view.skip_ws() + trigger = view.get_word() + + if trigger: + ctx.subcommand_passed = trigger + ctx.invoked_subcommand = self.all_commands.get(trigger, None) + + if early_invoke: + try: + await self.callback(*ctx.args, **ctx.kwargs) + except: + ctx.command_failed = True + raise + finally: + if call_hooks: + await self.call_after_hooks(ctx) + + if trigger and ctx.invoked_subcommand: + ctx.invoked_with = trigger + await ctx.invoked_subcommand.reinvoke(ctx, call_hooks=call_hooks) + elif not early_invoke: + # undo the trigger parsing + view.index = previous + view.previous = previous + await super().reinvoke(ctx, call_hooks=call_hooks) + +# Decorators + +def command(name=None, cls=None, **attrs): + """A decorator that transforms a function into a :class:`.Command` + or if called with :func:`.group`, :class:`.Group`. + + By default the ``help`` attribute is received automatically from the + docstring of the function and is cleaned up with the use of + ``inspect.cleandoc``. If the docstring is ``bytes``, then it is decoded + into :class:`str` using utf-8 encoding. + + All checks added using the :func:`.check` & co. decorators are added into + the function. There is no way to supply your own checks through this + decorator. + + Parameters + ----------- + name: :class:`str` + The name to create the command with. By default this uses the + function name unchanged. + cls + The class to construct with. By default this is :class:`.Command`. + You usually do not change this. + attrs + Keyword arguments to pass into the construction of the class denoted + by ``cls``. + + Raises + ------- + TypeError + If the function is not a coroutine or is already a command. + """ + if cls is None: + cls = Command + + def decorator(func): + if isinstance(func, Command): + raise TypeError('Callback is already a command.') + return cls(func, name=name, **attrs) + + return decorator + +def group(name=None, **attrs): + """A decorator that transforms a function into a :class:`.Group`. + + This is similar to the :func:`.command` decorator but the ``cls`` + parameter is set to :class:`Group` by default. + + .. versionchanged:: 1.1.0 + The ``cls`` parameter can now be passed. 
+ """ + + attrs.setdefault('cls', Group) + return command(name=name, **attrs) + +def check(predicate): + r"""A decorator that adds a check to the :class:`.Command` or its + subclasses. These checks could be accessed via :attr:`.Command.checks`. + + These checks should be predicates that take in a single parameter taking + a :class:`.Context`. If the check returns a ``False``\-like value then + during invocation a :exc:`.CheckFailure` exception is raised and sent to + the :func:`.on_command_error` event. + + If an exception should be thrown in the predicate then it should be a + subclass of :exc:`.CommandError`. Any exception not subclassed from it + will be propagated while those subclassed will be sent to + :func:`.on_command_error`. + + .. note:: + + These functions can either be regular functions or coroutines. + + Examples + --------- + + Creating a basic check to see if the command invoker is you. + + .. code-block:: python3 + + def check_if_it_is_me(ctx): + return ctx.message.author.id == 85309593344815104 + + @bot.command() + @commands.check(check_if_it_is_me) + async def only_for_me(ctx): + await ctx.send('I know you!') + + Transforming common checks into its own decorator: + + .. code-block:: python3 + + def is_me(): + def predicate(ctx): + return ctx.message.author.id == 85309593344815104 + return commands.check(predicate) + + @bot.command() + @is_me() + async def only_me(ctx): + await ctx.send('Only you!') + + Parameters + ----------- + predicate: Callable[[:class:`Context`], :class:`bool`] + The predicate to check if the command should be invoked. + """ + + def decorator(func): + if isinstance(func, Command): + func.checks.append(predicate) + else: + if not hasattr(func, '__commands_checks__'): + func.__commands_checks__ = [] + + func.__commands_checks__.append(predicate) + + return func + return decorator + +def has_role(item): + """A :func:`.check` that is added that checks if the member invoking the + command has the role specified via the name or ID specified. + + If a string is specified, you must give the exact name of the role, including + caps and spelling. + + If an integer is specified, you must give the exact snowflake ID of the role. + + If the message is invoked in a private message context then the check will + return ``False``. + + This check raises one of two special exceptions, :exc:`.MissingRole` if the user + is missing a role, or :exc:`.NoPrivateMessage` if it is used in a private message. + Both inherit from :exc:`.CheckFailure`. + + .. versionchanged:: 1.1.0 + + Raise :exc:`.MissingRole` or :exc:`.NoPrivateMessage` + instead of generic :exc:`.CheckFailure` + + Parameters + ----------- + item: Union[:class:`int`, :class:`str`] + The name or ID of the role to check. + """ + + def predicate(ctx): + if not isinstance(ctx.channel, discord.abc.GuildChannel): + raise NoPrivateMessage() + + if isinstance(item, int): + role = discord.utils.get(ctx.author.roles, id=item) + else: + role = discord.utils.get(ctx.author.roles, name=item) + if role is None: + raise MissingRole(item) + return True + + return check(predicate) + +def has_any_role(*items): + r"""A :func:`.check` that is added that checks if the member invoking the + command has **any** of the roles specified. This means that if they have + one out of the three roles specified, then this check will return `True`. + + Similar to :func:`.has_role`\, the names or IDs passed in must be exact. 
+ + This check raises one of two special exceptions, :exc:`.MissingAnyRole` if the user + is missing all roles, or :exc:`.NoPrivateMessage` if it is used in a private message. + Both inherit from :exc:`.CheckFailure`. + + .. versionchanged:: 1.1.0 + + Raise :exc:`.MissingAnyRole` or :exc:`.NoPrivateMessage` + instead of generic :exc:`.CheckFailure` + + Parameters + ----------- + items: List[Union[:class:`str`, :class:`int`]] + An argument list of names or IDs to check that the member has roles wise. + + Example + -------- + + .. code-block:: python3 + + @bot.command() + @commands.has_any_role('Library Devs', 'Moderators', 492212595072434186) + async def cool(ctx): + await ctx.send('You are cool indeed') + """ + def predicate(ctx): + if not isinstance(ctx.channel, discord.abc.GuildChannel): + raise NoPrivateMessage() + + getter = functools.partial(discord.utils.get, ctx.author.roles) + if any(getter(id=item) is not None if isinstance(item, int) else getter(name=item) is not None for item in items): + return True + raise MissingAnyRole(items) + + return check(predicate) + +def bot_has_role(item): + """Similar to :func:`.has_role` except checks if the bot itself has the + role. + + This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot + is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message. + Both inherit from :exc:`.CheckFailure`. + + .. versionchanged:: 1.1.0 + + Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage` + instead of generic :exc:`.CheckFailure` + """ + + def predicate(ctx): + ch = ctx.channel + if not isinstance(ch, discord.abc.GuildChannel): + raise NoPrivateMessage() + + me = ch.guild.me + if isinstance(item, int): + role = discord.utils.get(me.roles, id=item) + else: + role = discord.utils.get(me.roles, name=item) + if role is None: + raise BotMissingRole(item) + return True + return check(predicate) + +def bot_has_any_role(*items): + """Similar to :func:`.has_any_role` except checks if the bot itself has + any of the roles listed. + + This check raises one of two special exceptions, :exc:`.BotMissingAnyRole` if the bot + is missing all roles, or :exc:`.NoPrivateMessage` if it is used in a private message. + Both inherit from :exc:`.CheckFailure`. + + .. versionchanged:: 1.1.0 + + Raise :exc:`.BotMissingAnyRole` or :exc:`.NoPrivateMessage` + instead of generic checkfailure + """ + def predicate(ctx): + ch = ctx.channel + if not isinstance(ch, discord.abc.GuildChannel): + raise NoPrivateMessage() + + me = ch.guild.me + getter = functools.partial(discord.utils.get, me.roles) + if any(getter(id=item) is not None if isinstance(item, int) else getter(name=item) is not None for item in items): + return True + raise BotMissingAnyRole(items) + return check(predicate) + +def has_permissions(**perms): + """A :func:`.check` that is added that checks if the member has all of + the permissions necessary. + + The permissions passed in must be exactly like the properties shown under + :class:`.discord.Permissions`. + + This check raises a special exception, :exc:`.MissingPermissions` + that is inherited from :exc:`.CheckFailure`. + + Parameters + ------------ + perms + An argument list of permissions to check for. + + Example + --------- + + .. 
code-block:: python3 + + @bot.command() + @commands.has_permissions(manage_messages=True) + async def test(ctx): + await ctx.send('You can manage messages.') + + """ + def predicate(ctx): + ch = ctx.channel + permissions = ch.permissions_for(ctx.author) + + missing = [perm for perm, value in perms.items() if getattr(permissions, perm, None) != value] + + if not missing: + return True + + raise MissingPermissions(missing) + + return check(predicate) + +def bot_has_permissions(**perms): + """Similar to :func:`.has_permissions` except checks if the bot itself has + the permissions listed. + + This check raises a special exception, :exc:`.BotMissingPermissions` + that is inherited from :exc:`.CheckFailure`. + """ + def predicate(ctx): + guild = ctx.guild + me = guild.me if guild is not None else ctx.bot.user + permissions = ctx.channel.permissions_for(me) + + missing = [perm for perm, value in perms.items() if getattr(permissions, perm, None) != value] + + if not missing: + return True + + raise BotMissingPermissions(missing) + + return check(predicate) + +def dm_only(): + """A :func:`.check` that indicates this command must only be used in a + DM context. Only private messages are allowed when + using the command. + + This check raises a special exception, :exc:`.PrivateMessageOnly` + that is inherited from :exc:`.CheckFailure`. + + .. versionadded:: 1.1.0 + """ + + def predicate(ctx): + if ctx.guild is not None: + raise PrivateMessageOnly() + return True + + return check(predicate) + +def guild_only(): + """A :func:`.check` that indicates this command must only be used in a + guild context only. Basically, no private messages are allowed when + using the command. + + This check raises a special exception, :exc:`.NoPrivateMessage` + that is inherited from :exc:`.CheckFailure`. + """ + + def predicate(ctx): + if ctx.guild is None: + raise NoPrivateMessage() + return True + + return check(predicate) + +def is_owner(): + """A :func:`.check` that checks if the person invoking this command is the + owner of the bot. + + This is powered by :meth:`.Bot.is_owner`. + + This check raises a special exception, :exc:`.NotOwner` that is derived + from :exc:`.CheckFailure`. + """ + + async def predicate(ctx): + if not await ctx.bot.is_owner(ctx.author): + raise NotOwner('You do not own this bot.') + return True + + return check(predicate) + +def is_nsfw(): + """A :func:`.check` that checks if the channel is a NSFW channel. + + This check raises a special exception, :exc:`.NSFWChannelRequired` + that is derived from :exc:`.CheckFailure`. + + .. versionchanged:: 1.1.0 + + Raise :exc:`.NSFWChannelRequired` instead of generic :exc:`.CheckFailure`. + DM channels will also now pass this check. + """ + def pred(ctx): + ch = ctx.channel + if ctx.guild is None or (isinstance(ch, discord.TextChannel) and ch.is_nsfw()): + return True + raise NSFWChannelRequired(ch) + return check(pred) + +def cooldown(rate, per, type=BucketType.default): + """A decorator that adds a cooldown to a :class:`.Command` + or its subclasses. + + A cooldown allows a command to only be used a specific amount + of times in a specific time frame. These cooldowns can be based + either on a per-guild, per-channel, per-user, or global basis. + Denoted by the third argument of ``type`` which must be of enum + type ``BucketType`` which could be either: + + - ``BucketType.default`` for a global basis. + - ``BucketType.user`` for a per-user basis. + - ``BucketType.guild`` for a per-guild basis. + - ``BucketType.channel`` for a per-channel basis. 
+ - ``BucketType.member`` for a per-member basis. + - ``BucketType.category`` for a per-category basis. + + If a cooldown is triggered, then :exc:`.CommandOnCooldown` is triggered in + :func:`.on_command_error` and the local error handler. + + A command can only have a single cooldown. + + Parameters + ------------ + rate: :class:`int` + The number of times a command can be used before triggering a cooldown. + per: :class:`float` + The amount of seconds to wait for a cooldown when it's been triggered. + type: ``BucketType`` + The type of cooldown to have. + """ + + def decorator(func): + if isinstance(func, Command): + func._buckets = CooldownMapping(Cooldown(rate, per, type)) + else: + func.__commands_cooldown__ = Cooldown(rate, per, type) + return func + return decorator diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/errors.py b/venv/lib/python3.7/site-packages/discord/ext/commands/errors.py new file mode 100644 index 0000000..fdbd903 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/errors.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from discord.errors import DiscordException + + +__all__ = ( + 'CommandError', + 'MissingRequiredArgument', + 'BadArgument', + 'PrivateMessageOnly', + 'NoPrivateMessage', + 'CheckFailure', + 'CommandNotFound', + 'DisabledCommand', + 'CommandInvokeError', + 'TooManyArguments', + 'UserInputError', + 'CommandOnCooldown', + 'NotOwner', + 'MissingRole', + 'BotMissingRole', + 'MissingAnyRole', + 'BotMissingAnyRole', + 'MissingPermissions', + 'BotMissingPermissions', + 'NSFWChannelRequired', + 'ConversionError', + 'BadUnionArgument', + 'ArgumentParsingError', + 'UnexpectedQuoteError', + 'InvalidEndOfQuotedStringError', + 'ExpectedClosingQuoteError', + 'ExtensionError', + 'ExtensionAlreadyLoaded', + 'ExtensionNotLoaded', + 'NoEntryPointError', + 'ExtensionFailed', + 'ExtensionNotFound', +) + +class CommandError(DiscordException): + r"""The base exception type for all command related errors. + + This inherits from :exc:`discord.DiscordException`. + + This exception and exceptions inherited from it are handled + in a special way as they are caught and passed into a special event + from :class:`.Bot`\, :func:`on_command_error`. 
+ """ + def __init__(self, message=None, *args): + if message is not None: + # clean-up @everyone and @here mentions + m = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere') + super().__init__(m, *args) + else: + super().__init__(*args) + +class ConversionError(CommandError): + """Exception raised when a Converter class raises non-CommandError. + + This inherits from :exc:`CommandError`. + + Attributes + ---------- + converter: :class:`discord.ext.commands.Converter` + The converter that failed. + original + The original exception that was raised. You can also get this via + the ``__cause__`` attribute. + """ + def __init__(self, converter, original): + self.converter = converter + self.original = original + +class UserInputError(CommandError): + """The base exception type for errors that involve errors + regarding user input. + + This inherits from :exc:`CommandError`. + """ + pass + +class CommandNotFound(CommandError): + """Exception raised when a command is attempted to be invoked + but no command under that name is found. + + This is not raised for invalid subcommands, rather just the + initial main command that is attempted to be invoked. + + This inherits from :exc:`CommandError`. + """ + pass + +class MissingRequiredArgument(UserInputError): + """Exception raised when parsing a command and a parameter + that is required is not encountered. + + This inherits from :exc:`UserInputError` + + Attributes + ----------- + param: :class:`inspect.Parameter` + The argument that is missing. + """ + def __init__(self, param): + self.param = param + super().__init__('{0.name} is a required argument that is missing.'.format(param)) + +class TooManyArguments(UserInputError): + """Exception raised when the command was passed too many arguments and its + :attr:`.Command.ignore_extra` attribute was not set to ``True``. + + This inherits from :exc:`UserInputError` + """ + pass + +class BadArgument(UserInputError): + """Exception raised when a parsing or conversion failure is encountered + on an argument to pass into a command. + + This inherits from :exc:`UserInputError` + """ + pass + +class CheckFailure(CommandError): + """Exception raised when the predicates in :attr:`.Command.checks` have failed. + + This inherits from :exc:`CommandError` + """ + pass + +class PrivateMessageOnly(CheckFailure): + """Exception raised when an operation does not work outside of private + message contexts. + + This inherits from :exc:`CheckFailure` + """ + def __init__(self, message=None): + super().__init__(message or 'This command can only be used in private messages.') + +class NoPrivateMessage(CheckFailure): + """Exception raised when an operation does not work in private message + contexts. + + This inherits from :exc:`CheckFailure` + """ + + def __init__(self, message=None): + super().__init__(message or 'This command cannot be used in private messages.') + +class NotOwner(CheckFailure): + """Exception raised when the message author is not the owner of the bot. + + This inherits from :exc:`CheckFailure` + """ + pass + +class DisabledCommand(CommandError): + """Exception raised when the command being invoked is disabled. + + This inherits from :exc:`CommandError` + """ + pass + +class CommandInvokeError(CommandError): + """Exception raised when the command being invoked raised an exception. + + This inherits from :exc:`CommandError` + + Attributes + ----------- + original + The original exception that was raised. You can also get this via + the ``__cause__`` attribute. 
+ """ + def __init__(self, e): + self.original = e + super().__init__('Command raised an exception: {0.__class__.__name__}: {0}'.format(e)) + +class CommandOnCooldown(CommandError): + """Exception raised when the command being invoked is on cooldown. + + This inherits from :exc:`CommandError` + + Attributes + ----------- + cooldown: Cooldown + A class with attributes ``rate``, ``per``, and ``type`` similar to + the :func:`.cooldown` decorator. + retry_after: :class:`float` + The amount of seconds to wait before you can retry again. + """ + def __init__(self, cooldown, retry_after): + self.cooldown = cooldown + self.retry_after = retry_after + super().__init__('You are on cooldown. Try again in {:.2f}s'.format(retry_after)) + +class MissingRole(CheckFailure): + """Exception raised when the command invoker lacks a role to run a command. + + This inherits from :exc:`CheckFailure` + + .. versionadded:: 1.1.0 + + Attributes + ----------- + missing_role: Union[:class:`str`, :class:`int`] + The required role that is missing. + This is the parameter passed to :func:`~.commands.has_role`. + """ + def __init__(self, missing_role): + self.missing_role = missing_role + message = 'Role {0!r} is required to run this command.'.format(missing_role) + super().__init__(message) + +class BotMissingRole(CheckFailure): + """Exception raised when the bot's member lacks a role to run a command. + + This inherits from :exc:`CheckFailure` + + .. versionadded:: 1.1.0 + + Attributes + ----------- + missing_role: Union[:class:`str`, :class:`int`] + The required role that is missing. + This is the parameter passed to :func:`~.commands.has_role`. + """ + def __init__(self, missing_role): + self.missing_role = missing_role + message = 'Bot requires the role {0!r} to run this command'.format(missing_role) + super().__init__(message) + +class MissingAnyRole(CheckFailure): + """Exception raised when the command invoker lacks any of + the roles specified to run a command. + + This inherits from :exc:`CheckFailure` + + .. versionadded:: 1.1.0 + + Attributes + ----------- + missing_roles: List[Union[:class:`str`, :class:`int`]] + The roles that the invoker is missing. + These are the parameters passed to :func:`~.commands.has_any_role`. + """ + def __init__(self, missing_roles): + self.missing_roles = missing_roles + + missing = ["'{}'".format(role) for role in missing_roles] + + if len(missing) > 2: + fmt = '{}, or {}'.format(", ".join(missing[:-1]), missing[-1]) + else: + fmt = ' or '.join(missing) + + message = "You are missing at least one of the required roles: {}".format(fmt) + super().__init__(message) + + +class BotMissingAnyRole(CheckFailure): + """Exception raised when the bot's member lacks any of + the roles specified to run a command. + + This inherits from :exc:`CheckFailure` + + .. versionadded:: 1.1.0 + + Attributes + ----------- + missing_roles: List[Union[:class:`str`, :class:`int`]] + The roles that the bot's member is missing. + These are the parameters passed to :func:`~.commands.has_any_role`. + + """ + def __init__(self, missing_roles): + self.missing_roles = missing_roles + + missing = ["'{}'".format(role) for role in missing_roles] + + if len(missing) > 2: + fmt = '{}, or {}'.format(", ".join(missing[:-1]), missing[-1]) + else: + fmt = ' or '.join(missing) + + message = "Bot is missing at least one of the required roles: {}".format(fmt) + super().__init__(message) + +class NSFWChannelRequired(CheckFailure): + """Exception raised when a channel does not have the required NSFW setting. 
+ + This inherits from :exc:`CheckFailure`. + + .. versionadded:: 1.1.0 + + Parameters + ----------- + channel: :class:`discord.abc.GuildChannel` + The channel that does not have NSFW enabled. + """ + def __init__(self, channel): + self.channel = channel + super().__init__("Channel '{}' needs to be NSFW for this command to work.".format(channel)) + +class MissingPermissions(CheckFailure): + """Exception raised when the command invoker lacks permissions to run a + command. + + This inherits from :exc:`CheckFailure` + + Attributes + ----------- + missing_perms: :class:`list` + The required permissions that are missing. + """ + def __init__(self, missing_perms, *args): + self.missing_perms = missing_perms + + missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_perms] + + if len(missing) > 2: + fmt = '{}, and {}'.format(", ".join(missing[:-1]), missing[-1]) + else: + fmt = ' and '.join(missing) + message = 'You are missing {} permission(s) to run this command.'.format(fmt) + super().__init__(message, *args) + +class BotMissingPermissions(CheckFailure): + """Exception raised when the bot's member lacks permissions to run a + command. + + This inherits from :exc:`CheckFailure` + + Attributes + ----------- + missing_perms: :class:`list` + The required permissions that are missing. + """ + def __init__(self, missing_perms, *args): + self.missing_perms = missing_perms + + missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_perms] + + if len(missing) > 2: + fmt = '{}, and {}'.format(", ".join(missing[:-1]), missing[-1]) + else: + fmt = ' and '.join(missing) + message = 'Bot requires {} permission(s) to run this command.'.format(fmt) + super().__init__(message, *args) + +class BadUnionArgument(UserInputError): + """Exception raised when a :data:`typing.Union` converter fails for all + its associated types. + + This inherits from :exc:`UserInputError` + + Attributes + ----------- + param: :class:`inspect.Parameter` + The parameter that failed being converted. + converters: Tuple[Type, ...] + A tuple of converters attempted in conversion, in order of failure. + errors: List[:class:`CommandError`] + A list of errors that were caught from failing the conversion. + """ + def __init__(self, param, converters, errors): + self.param = param + self.converters = converters + self.errors = errors + + def _get_name(x): + try: + return x.__name__ + except AttributeError: + return x.__class__.__name__ + + to_string = [_get_name(x) for x in converters] + if len(to_string) > 2: + fmt = '{}, or {}'.format(', '.join(to_string[:-1]), to_string[-1]) + else: + fmt = ' or '.join(to_string) + + super().__init__('Could not convert "{0.name}" into {1}.'.format(param, fmt)) + +class ArgumentParsingError(UserInputError): + """An exception raised when the parser fails to parse a user's input. + + This inherits from :exc:`UserInputError`. + + There are child classes that implement more granular parsing errors for + i18n purposes. + """ + pass + +class UnexpectedQuoteError(ArgumentParsingError): + """An exception raised when the parser encounters a quote mark inside a non-quoted string. + + This inherits from :exc:`ArgumentParsingError`. + + Attributes + ------------ + quote: :class:`str` + The quote mark that was found inside the non-quoted string. 
+ """ + def __init__(self, quote): + self.quote = quote + super().__init__('Unexpected quote mark, {0!r}, in non-quoted string'.format(quote)) + +class InvalidEndOfQuotedStringError(ArgumentParsingError): + """An exception raised when a space is expected after the closing quote in a string + but a different character is found. + + This inherits from :exc:`ArgumentParsingError`. + + Attributes + ----------- + char: :class:`str` + The character found instead of the expected string. + """ + def __init__(self, char): + self.char = char + super().__init__('Expected space after closing quotation but received {0!r}'.format(char)) + +class ExpectedClosingQuoteError(ArgumentParsingError): + """An exception raised when a quote character is expected but not found. + + This inherits from :exc:`ArgumentParsingError`. + + Attributes + ----------- + close_quote: :class:`str` + The quote character expected. + """ + + def __init__(self, close_quote): + self.close_quote = close_quote + super().__init__('Expected closing {}.'.format(close_quote)) + +class ExtensionError(DiscordException): + """Base exception for extension related errors. + + This inherits from :exc:`~discord.DiscordException`. + + Attributes + ------------ + name: :class:`str` + The extension that had an error. + """ + def __init__(self, message=None, *args, name): + self.name = name + message = message or 'Extension {!r} had an error.'.format(name) + # clean-up @everyone and @here mentions + m = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere') + super().__init__(m, *args) + +class ExtensionAlreadyLoaded(ExtensionError): + """An exception raised when an extension has already been loaded. + + This inherits from :exc:`ExtensionError` + """ + def __init__(self, name): + super().__init__('Extension {!r} is already loaded.'.format(name), name=name) + +class ExtensionNotLoaded(ExtensionError): + """An exception raised when an extension was not loaded. + + This inherits from :exc:`ExtensionError` + """ + def __init__(self, name): + super().__init__('Extension {!r} has not been loaded.'.format(name), name=name) + +class NoEntryPointError(ExtensionError): + """An exception raised when an extension does not have a ``setup`` entry point function. + + This inherits from :exc:`ExtensionError` + """ + def __init__(self, name): + super().__init__("Extension {!r} has no 'setup' function.".format(name), name=name) + +class ExtensionFailed(ExtensionError): + """An exception raised when an extension failed to load during execution of the ``setup`` entry point. + + This inherits from :exc:`ExtensionError` + + Attributes + ----------- + name: :class:`str` + The extension that had the error. + original: :exc:`Exception` + The original exception that was raised. You can also get this via + the ``__cause__`` attribute. + """ + def __init__(self, name, original): + self.original = original + fmt = 'Extension {0!r} raised an error: {1.__class__.__name__}: {1}' + super().__init__(fmt.format(name, original), name=name) + +class ExtensionNotFound(ExtensionError): + """An exception raised when an extension failed to be imported. + + This inherits from :exc:`ExtensionError` + + Attributes + ----------- + name: :class:`str` + The extension that had the error. + original: :exc:`ImportError` + The original exception that was raised. You can also get this via + the ``__cause__`` attribute. + """ + def __init__(self, name, original): + self.original = original + fmt = 'Extension {0!r} could not be loaded.' 
+ super().__init__(fmt.format(name), name=name) diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/help.py b/venv/lib/python3.7/site-packages/discord/ext/commands/help.py new file mode 100644 index 0000000..172217c --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/help.py @@ -0,0 +1,1264 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import itertools +import copy +import functools +import inspect +import re +import discord.utils + +from .core import Group, Command +from .errors import CommandError + +__all__ = ( + 'Paginator', + 'HelpCommand', + 'DefaultHelpCommand', + 'MinimalHelpCommand', +) + +# help -> shows info of bot on top/bottom and lists subcommands +# help command -> shows detailed info of command +# help command -> same as above + +# + +# + +# + +# Cog: +# +# +# Other Cog: +# +# No Category: +# + +# Type help command for more info on a command. +# You can also type help category for more info on a category. + +class Paginator: + """A class that aids in paginating code blocks for Discord messages. + + .. container:: operations + + .. describe:: len(x) + + Returns the total number of characters in the paginator. + + Attributes + ----------- + prefix: Optional[:class:`str`] + The prefix inserted to every page. e.g. three backticks. + suffix: Optional[:class:`str`] + The suffix appended at the end of every page. e.g. three backticks. + max_size: :class:`int` + The maximum amount of codepoints allowed in a page. + """ + def __init__(self, prefix='```', suffix='```', max_size=2000): + self.prefix = prefix + self.suffix = suffix + self.max_size = max_size - (0 if suffix is None else len(suffix)) + self.clear() + + def clear(self): + """Clears the paginator to have no pages.""" + if self.prefix is not None: + self._current_page = [self.prefix] + self._count = len(self.prefix) + 1 # prefix + newline + else: + self._current_page = [] + self._count = 0 + self._pages = [] + + @property + def _prefix_len(self): + return len(self.prefix) if self.prefix else 0 + + def add_line(self, line='', *, empty=False): + """Adds a line to the current page. + + If the line exceeds the :attr:`max_size` then an exception + is raised. + + Parameters + ----------- + line: :class:`str` + The line to add. + empty: :class:`bool` + Indicates if another empty line should be added. + + Raises + ------ + RuntimeError + The line was too big for the current :attr:`max_size`. 
+ """ + max_page_size = self.max_size - self._prefix_len - 2 + if len(line) > max_page_size: + raise RuntimeError('Line exceeds maximum page size %s' % (max_page_size)) + + if self._count + len(line) + 1 > self.max_size: + self.close_page() + + self._count += len(line) + 1 + self._current_page.append(line) + + if empty: + self._current_page.append('') + self._count += 1 + + def close_page(self): + """Prematurely terminate a page.""" + if self.suffix is not None: + self._current_page.append(self.suffix) + self._pages.append('\n'.join(self._current_page)) + + if self.prefix is not None: + self._current_page = [self.prefix] + self._count = len(self.prefix) + 1 # prefix + newline + else: + self._current_page = [] + self._count = 0 + + def __len__(self): + total = sum(len(p) for p in self._pages) + return total + self._count + + @property + def pages(self): + """Returns the rendered list of pages.""" + # we have more than just the prefix in our current page + if len(self._current_page) > 1: + self.close_page() + return self._pages + + def __repr__(self): + fmt = '' + return fmt.format(self) + +def _not_overriden(f): + f.__help_command_not_overriden__ = True + return f + +class _HelpCommandImpl(Command): + def __init__(self, inject, *args, **kwargs): + super().__init__(inject.command_callback, *args, **kwargs) + self._original = inject + self._injected = inject + + async def prepare(self, ctx): + self._injected = injected = self._original.copy() + injected.context = ctx + self.callback = injected.command_callback + + on_error = injected.on_help_command_error + if not hasattr(on_error, '__help_command_not_overriden__'): + if self.cog is not None: + self.on_error = self._on_error_cog_implementation + else: + self.on_error = on_error + + await super().prepare(ctx) + + async def _parse_arguments(self, ctx): + # Make the parser think we don't have a cog so it doesn't + # inject the parameter into `ctx.args`. + original_cog = self.cog + self.cog = None + try: + await super()._parse_arguments(ctx) + finally: + self.cog = original_cog + + async def _on_error_cog_implementation(self, dummy, ctx, error): + await self._injected.on_help_command_error(ctx, error) + + @property + def clean_params(self): + result = self.params.copy() + try: + result.popitem(last=False) + except Exception: + raise ValueError('Missing context parameter') from None + else: + return result + + def _inject_into_cog(self, cog): + # Warning: hacky + + # Make the cog think that get_commands returns this command + # as well if we inject it without modifying __cog_commands__ + # since that's used for the injection and ejection of cogs. + def wrapped_get_commands(*, _original=cog.get_commands): + ret = _original() + ret.append(self) + return ret + + # Ditto here + def wrapped_walk_commands(*, _original=cog.walk_commands): + yield from _original() + yield self + + functools.update_wrapper(wrapped_get_commands, cog.get_commands) + functools.update_wrapper(wrapped_walk_commands, cog.walk_commands) + cog.get_commands = wrapped_get_commands + cog.walk_commands = wrapped_walk_commands + self.cog = cog + + def _eject_cog(self): + if self.cog is None: + return + + # revert back into their original methods + cog = self.cog + cog.get_commands = cog.get_commands.__wrapped__ + cog.walk_commands = cog.walk_commands.__wrapped__ + self.cog = None + +class HelpCommand: + r"""The base implementation for help command formatting. + + .. 
note:: + + Internally instances of this class are deep copied every time + the command itself is invoked to prevent a race condition + mentioned in :issue:`2123`. + + This means that relying on the state of this class to be + the same between command invocations would not work as expected. + + Attributes + ------------ + context: Optional[:class:`Context`] + The context that invoked this help formatter. This is generally set after + the help command assigned, :func:`command_callback`\, has been called. + show_hidden: :class:`bool` + Specifies if hidden commands should be shown in the output. + Defaults to ``False``. + verify_checks: :class:`bool` + Specifies if commands should have their :attr:`.Command.checks` called + and verified. Defaults to ``True``. + command_attrs: :class:`dict` + A dictionary of options to pass in for the construction of the help command. + This allows you to change the command behaviour without actually changing + the implementation of the command. The attributes will be the same as the + ones passed in the :class:`.Command` constructor. + """ + + MENTION_TRANSFORMS = { + '@everyone': '@\u200beveryone', + '@here': '@\u200bhere', + r'<@!?[0-9]{17,22}>': '@deleted-user', + r'<@&[0-9]{17,22}>': '@deleted-role' + } + + MENTION_PATTERN = re.compile('|'.join(MENTION_TRANSFORMS.keys())) + + def __new__(cls, *args, **kwargs): + # To prevent race conditions of a single instance while also allowing + # for settings to be passed the original arguments passed must be assigned + # to allow for easier copies (which will be made when the help command is actually called) + # see issue 2123 + self = super().__new__(cls) + + # Shallow copies cannot be used in this case since it is not unusual to pass + # instances that need state, e.g. Paginator or what have you into the function + # The keys can be safely copied as-is since they're 99.99% certain of being + # string keys + deepcopy = copy.deepcopy + self.__original_kwargs__ = { + k: deepcopy(v) + for k, v in kwargs.items() + } + self.__original_args__ = deepcopy(args) + return self + + def __init__(self, **options): + self.show_hidden = options.pop('show_hidden', False) + self.verify_checks = options.pop('verify_checks', True) + self.command_attrs = attrs = options.pop('command_attrs', {}) + attrs.setdefault('name', 'help') + attrs.setdefault('help', 'Shows this message') + self.context = None + self._command_impl = None + + def copy(self): + obj = self.__class__(*self.__original_args__, **self.__original_kwargs__) + obj._command_impl = self._command_impl + return obj + + def _add_to_bot(self, bot): + command = _HelpCommandImpl(self, **self.command_attrs) + bot.add_command(command) + self._command_impl = command + + def _remove_from_bot(self, bot): + bot.remove_command(self._command_impl.name) + self._command_impl._eject_cog() + self._command_impl = None + + def get_bot_mapping(self): + """Retrieves the bot mapping passed to :meth:`send_bot_help`.""" + bot = self.context.bot + mapping = { + cog: cog.get_commands() + for cog in bot.cogs.values() + } + mapping[None] = [c for c in bot.all_commands.values() if c.cog is None] + return mapping + + @property + def clean_prefix(self): + """The cleaned up invoke prefix. i.e. mentions are ``@name`` instead of ``<@id>``.""" + user = self.context.guild.me if self.context.guild else self.context.bot.user + # this breaks if the prefix mention is not the bot itself but I + # consider this to be an *incredibly* strange use case. 
I'd rather go + # for this common use case rather than waste performance for the + # odd one. + return self.context.prefix.replace(user.mention, '@' + user.display_name) + + @property + def invoked_with(self): + """Similar to :attr:`Context.invoked_with` except properly handles + the case where :meth:`Context.send_help` is used. + + If the help command was used regularly then this returns + the :attr:`Context.invoked_with` attribute. Otherwise, if + it the help command was called using :meth:`Context.send_help` + then it returns the internal command name of the help command. + + Returns + --------- + :class:`str` + The command name that triggered this invocation. + """ + command_name = self._command_impl.name + ctx = self.context + if ctx is None or ctx.command is None or ctx.command.qualified_name != command_name: + return command_name + return ctx.invoked_with + + def get_command_signature(self, command): + """Retrieves the signature portion of the help page. + + Parameters + ------------ + command: :class:`Command` + The command to get the signature of. + + Returns + -------- + :class:`str` + The signature for the command. + """ + + parent = command.full_parent_name + if len(command.aliases) > 0: + aliases = '|'.join(command.aliases) + fmt = '[%s|%s]' % (command.name, aliases) + if parent: + fmt = parent + ' ' + fmt + alias = fmt + else: + alias = command.name if not parent else parent + ' ' + command.name + + return '%s%s %s' % (self.clean_prefix, alias, command.signature) + + def remove_mentions(self, string): + """Removes mentions from the string to prevent abuse. + + This includes ``@everyone``, ``@here``, member mentions and role mentions. + """ + + def replace(obj, *, transforms=self.MENTION_TRANSFORMS): + return transforms.get(obj.group(0), '@invalid') + + return self.MENTION_PATTERN.sub(replace, string) + + @property + def cog(self): + """A property for retrieving or setting the cog for the help command. + + When a cog is set for the help command, it is as-if the help command + belongs to that cog. All cog special methods will apply to the help + command and it will be automatically unset on unload. + + To unbind the cog from the help command, you can set it to ``None``. + + Returns + -------- + Optional[:class:`Cog`] + The cog that is currently set for the help command. + """ + return self._command_impl.cog + + @cog.setter + def cog(self, cog): + # Remove whatever cog is currently valid, if any + self._command_impl._eject_cog() + + # If a new cog is set then inject it. + if cog is not None: + self._command_impl._inject_into_cog(cog) + + def command_not_found(self, string): + """|maybecoro| + + A method called when a command is not found in the help command. + This is useful to override for i18n. + + Defaults to ``No command called {0} found.`` + + Parameters + ------------ + string: :class:`str` + The string that contains the invalid command. Note that this has + had mentions removed to prevent abuse. + + Returns + --------- + :class:`str` + The string to use when a command has not been found. + """ + return 'No command called "{}" found.'.format(string) + + def subcommand_not_found(self, command, string): + """|maybecoro| + + A method called when a command did not have a subcommand requested in the help command. + This is useful to override for i18n. + + Defaults to either: + + - ``'Command "{command.qualified_name}" has no subcommands.'`` + - If there is no subcommand in the ``command`` parameter. 
+ - ``'Command "{command.qualified_name}" has no subcommand named {string}'`` + - If the ``command`` parameter has subcommands but not one named ``string``. + + Parameters + ------------ + command: :class:`Command` + The command that did not have the subcommand requested. + string: :class:`str` + The string that contains the invalid subcommand. Note that this has + had mentions removed to prevent abuse. + + Returns + --------- + :class:`str` + The string to use when the command did not have the subcommand requested. + """ + if isinstance(command, Group) and len(command.all_commands) > 0: + return 'Command "{0.qualified_name}" has no subcommand named {1}'.format(command, string) + return 'Command "{0.qualified_name}" has no subcommands.'.format(command) + + async def filter_commands(self, commands, *, sort=False, key=None): + """|coro| + + Returns a filtered list of commands and optionally sorts them. + + This takes into account the :attr:`verify_checks` and :attr:`show_hidden` + attributes. + + Parameters + ------------ + commands: Iterable[:class:`Command`] + An iterable of commands that are getting filtered. + sort: :class:`bool` + Whether to sort the result. + key: Optional[Callable[:class:`Command`, Any]] + An optional key function to pass to :func:`py:sorted` that + takes a :class:`Command` as its sole parameter. If ``sort`` is + passed as ``True`` then this will default as the command name. + + Returns + --------- + List[:class:`Command`] + A list of commands that passed the filter. + """ + + if sort and key is None: + key = lambda c: c.name + + iterator = commands if self.show_hidden else filter(lambda c: not c.hidden, commands) + + if not self.verify_checks: + # if we do not need to verify the checks then we can just + # run it straight through normally without using await. + return sorted(iterator, key=key) if sort else list(iterator) + + # if we're here then we need to check every command if it can run + async def predicate(cmd): + try: + return await cmd.can_run(self.context) + except CommandError: + return False + + ret = [] + for cmd in iterator: + valid = await predicate(cmd) + if valid: + ret.append(cmd) + + if sort: + ret.sort(key=key) + return ret + + def get_max_size(self, commands): + """Returns the largest name length of the specified command list. + + Parameters + ------------ + commands: Sequence[:class:`Command`] + A sequence of commands to check for the largest size. + + Returns + -------- + :class:`int` + The maximum width of the commands. + """ + + as_lengths = ( + discord.utils._string_width(c.name) + for c in commands + ) + return max(as_lengths, default=0) + + def get_destination(self): + """Returns the :class:`~discord.abc.Messageable` where the help command will be output. + + You can override this method to customise the behaviour. + + By default this returns the context's channel. + """ + return self.context.channel + + async def send_error_message(self, error): + """|coro| + + Handles the implementation when an error happens in the help command. + For example, the result of :meth:`command_not_found` or + :meth:`command_has_no_subcommand_found` will be passed here. + + You can override this method to customise the behaviour. + + By default, this sends the error message to the destination + specified by :meth:`get_destination`. + + .. note:: + + You can access the invocation context with :attr:`HelpCommand.context`. + + Parameters + ------------ + error: :class:`str` + The error message to display to the user. 
Note that this has + had mentions removed to prevent abuse. + """ + destination = self.get_destination() + await destination.send(error) + + @_not_overriden + async def on_help_command_error(self, ctx, error): + """|coro| + + The help command's error handler, as specified by :ref:`ext_commands_error_handler`. + + Useful to override if you need some specific behaviour when the error handler + is called. + + By default this method does nothing and just propagates to the default + error handlers. + + Parameters + ------------ + ctx: :class:`Context` + The invocation context. + error: :class:`CommandError` + The error that was raised. + """ + pass + + async def send_bot_help(self, mapping): + """|coro| + + Handles the implementation of the bot command page in the help command. + This function is called when the help command is called with no arguments. + + It should be noted that this method does not return anything -- rather the + actual message sending should be done inside this method. Well behaved subclasses + should use :meth:`get_destination` to know where to send, as this is a customisation + point for other users. + + You can override this method to customise the behaviour. + + .. note:: + + You can access the invocation context with :attr:`HelpCommand.context`. + + Also, the commands in the mapping are not filtered. To do the filtering + you will have to call :meth:`filter_commands` yourself. + + Parameters + ------------ + mapping: Mapping[Optional[:class:`Cog`], List[:class:`Command`]] + A mapping of cogs to commands that have been requested by the user for help. + The key of the mapping is the :class:`~.commands.Cog` that the command belongs to, or + ``None`` if there isn't one, and the value is a list of commands that belongs to that cog. + """ + return None + + async def send_cog_help(self, cog): + """|coro| + + Handles the implementation of the cog page in the help command. + This function is called when the help command is called with a cog as the argument. + + It should be noted that this method does not return anything -- rather the + actual message sending should be done inside this method. Well behaved subclasses + should use :meth:`get_destination` to know where to send, as this is a customisation + point for other users. + + You can override this method to customise the behaviour. + + .. note:: + + You can access the invocation context with :attr:`HelpCommand.context`. + + To get the commands that belong to this cog see :meth:`Cog.get_commands`. + The commands returned not filtered. To do the filtering you will have to call + :meth:`filter_commands` yourself. + + Parameters + ----------- + cog: :class:`Cog` + The cog that was requested for help. + """ + return None + + async def send_group_help(self, group): + """|coro| + + Handles the implementation of the group page in the help command. + This function is called when the help command is called with a group as the argument. + + It should be noted that this method does not return anything -- rather the + actual message sending should be done inside this method. Well behaved subclasses + should use :meth:`get_destination` to know where to send, as this is a customisation + point for other users. + + You can override this method to customise the behaviour. + + .. note:: + + You can access the invocation context with :attr:`HelpCommand.context`. + + To get the commands that belong to this group without aliases see + :attr:`Group.commands`. The commands returned not filtered. 
To do the + filtering you will have to call :meth:`filter_commands` yourself. + + Parameters + ----------- + group: :class:`Group` + The group that was requested for help. + """ + return None + + async def send_command_help(self, command): + """|coro| + + Handles the implementation of the single command page in the help command. + + It should be noted that this method does not return anything -- rather the + actual message sending should be done inside this method. Well behaved subclasses + should use :meth:`get_destination` to know where to send, as this is a customisation + point for other users. + + You can override this method to customise the behaviour. + + .. note:: + + You can access the invocation context with :attr:`HelpCommand.context`. + + .. admonition:: Showing Help + :class: helpful + + There are certain attributes and methods that are helpful for a help command + to show such as the following: + + - :attr:`Command.help` + - :attr:`Command.brief` + - :attr:`Command.short_doc` + - :attr:`Command.description` + - :meth:`get_command_signature` + + There are more than just these attributes but feel free to play around with + these to help you get started to get the output that you want. + + Parameters + ----------- + command: :class:`Command` + The command that was requested for help. + """ + return None + + async def prepare_help_command(self, ctx, command=None): + """|coro| + + A low level method that can be used to prepare the help command + before it does anything. For example, if you need to prepare + some state in your subclass before the command does its processing + then this would be the place to do it. + + The default implementation does nothing. + + .. note:: + + This is called *inside* the help command callback body. So all + the usual rules that happen inside apply here as well. + + Parameters + ----------- + ctx: :class:`Context` + The invocation context. + command: Optional[:class:`str`] + The argument passed to the help command. + """ + pass + + async def command_callback(self, ctx, *, command=None): + """|coro| + + The actual implementation of the help command. + + It is not recommended to override this method and instead change + the behaviour through the methods that actually get dispatched. + + - :meth:`send_bot_help` + - :meth:`send_cog_help` + - :meth:`send_group_help` + - :meth:`send_command_help` + - :meth:`get_destination` + - :meth:`command_not_found` + - :meth:`subcommand_not_found` + - :meth:`send_error_message` + - :meth:`on_help_command_error` + - :meth:`prepare_help_command` + """ + await self.prepare_help_command(ctx, command) + bot = ctx.bot + + if command is None: + mapping = self.get_bot_mapping() + return await self.send_bot_help(mapping) + + # Check if it's a cog + cog = bot.get_cog(command) + if cog is not None: + return await self.send_cog_help(cog) + + maybe_coro = discord.utils.maybe_coroutine + + # If it's not a cog then it's a command. + # Since we want to have detailed errors when someone + # passes an invalid subcommand, we need to walk through + # the command group chain ourselves. 
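+ # e.g. for "help tag create": "tag" is resolved from bot.all_commands and
+ # "create" is then looked up on that group; a miss at any step is reported
+ # through command_not_found / subcommand_not_found and send_error_message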
+ keys = command.split(' ') + cmd = bot.all_commands.get(keys[0]) + if cmd is None: + string = await maybe_coro(self.command_not_found, self.remove_mentions(keys[0])) + return await self.send_error_message(string) + + for key in keys[1:]: + try: + found = cmd.all_commands.get(key) + except AttributeError: + string = await maybe_coro(self.subcommand_not_found, cmd, self.remove_mentions(key)) + return await self.send_error_message(string) + else: + if found is None: + string = await maybe_coro(self.subcommand_not_found, cmd, self.remove_mentions(key)) + return await self.send_error_message(string) + cmd = found + + if isinstance(cmd, Group): + return await self.send_group_help(cmd) + else: + return await self.send_command_help(cmd) + +class DefaultHelpCommand(HelpCommand): + """The implementation of the default help command. + + This inherits from :class:`HelpCommand`. + + It extends it with the following attributes. + + Attributes + ------------ + width: :class:`int` + The maximum number of characters that fit in a line. + Defaults to 80. + sort_commands: :class:`bool` + Whether to sort the commands in the output alphabetically. Defaults to ``True``. + dm_help: Optional[:class:`bool`] + A tribool that indicates if the help command should DM the user instead of + sending it to the channel it received it from. If the boolean is set to + ``True``, then all help output is DM'd. If ``False``, none of the help + output is DM'd. If ``None``, then the bot will only DM when the help + message becomes too long (dictated by more than :attr:`dm_help_threshold` characters). + Defaults to ``False``. + dm_help_threshold: Optional[:class:`int`] + The number of characters the paginator must accumulate before getting DM'd to the + user if :attr:`dm_help` is set to ``None``. Defaults to 1000. + indent: :class:`int` + How much to intend the commands from a heading. Defaults to ``2``. + commands_heading: :class:`str` + The command list's heading string used when the help command is invoked with a category name. + Useful for i18n. Defaults to ``"Commands:"`` + no_category: :class:`str` + The string used when there is a command which does not belong to any category(cog). + Useful for i18n. Defaults to ``"No Category"`` + paginator: :class:`Paginator` + The paginator used to paginate the help command output. + """ + + def __init__(self, **options): + self.width = options.pop('width', 80) + self.indent = options.pop('indent', 2) + self.sort_commands = options.pop('sort_commands', True) + self.dm_help = options.pop('dm_help', False) + self.dm_help_threshold = options.pop('dm_help_threshold', 1000) + self.commands_heading = options.pop('commands_heading', "Commands:") + self.no_category = options.pop('no_category', 'No Category') + self.paginator = options.pop('paginator', None) + + if self.paginator is None: + self.paginator = Paginator() + + super().__init__(**options) + + def shorten_text(self, text): + """Shortens text to fit into the :attr:`width`.""" + if len(text) > self.width: + return text[:self.width - 3] + '...' + return text + + def get_ending_note(self): + """Returns help command's ending note. This is mainly useful to override for i18n purposes.""" + command_name = self.invoked_with + return "Type {0}{1} command for more info on a command.\n" \ + "You can also type {0}{1} category for more info on a category.".format(self.clean_prefix, command_name) + + def add_indented_commands(self, commands, *, heading, max_size=None): + """Indents a list of commands after the specified heading. 
+ + The formatting is added to the :attr:`paginator`. + + The default implementation is the command name indented by + :attr:`indent` spaces, padded to ``max_size`` followed by + the command's :attr:`Command.short_doc` and then shortened + to fit into the :attr:`width`. + + Parameters + ----------- + commands: Sequence[:class:`Command`] + A list of commands to indent for output. + heading: :class:`str` + The heading to add to the output. This is only added + if the list of commands is greater than 0. + max_size: Optional[:class:`int`] + The max size to use for the gap between indents. + If unspecified, calls :meth:`get_max_size` on the + commands parameter. + """ + + if not commands: + return + + self.paginator.add_line(heading) + max_size = max_size or self.get_max_size(commands) + + get_width = discord.utils._string_width + for command in commands: + name = command.name + width = max_size - (get_width(name) - len(name)) + entry = '{0}{1:<{width}} {2}'.format(self.indent * ' ', name, command.short_doc, width=width) + self.paginator.add_line(self.shorten_text(entry)) + + async def send_pages(self): + """A helper utility to send the page output from :attr:`paginator` to the destination.""" + destination = self.get_destination() + for page in self.paginator.pages: + await destination.send(page) + + def add_command_formatting(self, command): + """A utility function to format the non-indented block of commands and groups. + + Parameters + ------------ + command: :class:`Command` + The command to format. + """ + + if command.description: + self.paginator.add_line(command.description, empty=True) + + signature = self.get_command_signature(command) + self.paginator.add_line(signature, empty=True) + + if command.help: + try: + self.paginator.add_line(command.help, empty=True) + except RuntimeError: + for line in command.help.splitlines(): + self.paginator.add_line(line) + self.paginator.add_line() + + def get_destination(self): + ctx = self.context + if self.dm_help is True: + return ctx.author + elif self.dm_help is None and len(self.paginator) > self.dm_help_threshold: + return ctx.author + else: + return ctx.channel + + async def prepare_help_command(self, ctx, command): + self.paginator.clear() + await super().prepare_help_command(ctx, command) + + async def send_bot_help(self, mapping): + ctx = self.context + bot = ctx.bot + + if bot.description: + # portion + self.paginator.add_line(bot.description, empty=True) + + no_category = '\u200b{0.no_category}:'.format(self) + def get_category(command, *, no_category=no_category): + cog = command.cog + return cog.qualified_name + ':' if cog is not None else no_category + + filtered = await self.filter_commands(bot.commands, sort=True, key=get_category) + max_size = self.get_max_size(filtered) + to_iterate = itertools.groupby(filtered, key=get_category) + + # Now we can add the commands to the page. 
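+ # filter_commands sorted the commands by category above, so each groupby
+ # bucket below is a single cog heading; entries are re-sorted by name
+ # when sort_commands is enabled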
+ for category, commands in to_iterate: + commands = sorted(commands, key=lambda c: c.name) if self.sort_commands else list(commands) + self.add_indented_commands(commands, heading=category, max_size=max_size) + + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + + async def send_command_help(self, command): + self.add_command_formatting(command) + self.paginator.close_page() + await self.send_pages() + + async def send_group_help(self, group): + self.add_command_formatting(group) + + filtered = await self.filter_commands(group.commands, sort=self.sort_commands) + self.add_indented_commands(filtered, heading=self.commands_heading) + + if filtered: + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + + async def send_cog_help(self, cog): + if cog.description: + self.paginator.add_line(cog.description, empty=True) + + filtered = await self.filter_commands(cog.get_commands(), sort=self.sort_commands) + self.add_indented_commands(filtered, heading=self.commands_heading) + + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + +class MinimalHelpCommand(HelpCommand): + """An implementation of a help command with minimal output. + + This inherits from :class:`HelpCommand`. + + Attributes + ------------ + sort_commands: :class:`bool` + Whether to sort the commands in the output alphabetically. Defaults to ``True``. + commands_heading: :class:`str` + The command list's heading string used when the help command is invoked with a category name. + Useful for i18n. Defaults to ``"Commands"`` + aliases_heading: :class:`str` + The alias list's heading string used to list the aliases of the command. Useful for i18n. + Defaults to ``"Aliases:"``. + dm_help: Optional[:class:`bool`] + A tribool that indicates if the help command should DM the user instead of + sending it to the channel it received it from. If the boolean is set to + ``True``, then all help output is DM'd. If ``False``, none of the help + output is DM'd. If ``None``, then the bot will only DM when the help + message becomes too long (dictated by more than :attr:`dm_help_threshold` characters). + Defaults to ``False``. + dm_help_threshold: Optional[:class:`int`] + The number of characters the paginator must accumulate before getting DM'd to the + user if :attr:`dm_help` is set to ``None``. Defaults to 1000. + no_category: :class:`str` + The string used when there is a command which does not belong to any category(cog). + Useful for i18n. Defaults to ``"No Category"`` + paginator: :class:`Paginator` + The paginator used to paginate the help command output. 
+ """ + + def __init__(self, **options): + self.sort_commands = options.pop('sort_commands', True) + self.commands_heading = options.pop('commands_heading', "Commands") + self.dm_help = options.pop('dm_help', False) + self.dm_help_threshold = options.pop('dm_help_threshold', 1000) + self.aliases_heading = options.pop('aliases_heading', "Aliases:") + self.no_category = options.pop('no_category', 'No Category') + self.paginator = options.pop('paginator', None) + + if self.paginator is None: + self.paginator = Paginator(suffix=None, prefix=None) + + super().__init__(**options) + + async def send_pages(self): + """A helper utility to send the page output from :attr:`paginator` to the destination.""" + destination = self.get_destination() + for page in self.paginator.pages: + await destination.send(page) + + def get_opening_note(self): + """Returns help command's opening note. This is mainly useful to override for i18n purposes. + + The default implementation returns :: + + Use `{prefix}{command_name} [command]` for more info on a command. + You can also use `{prefix}{command_name} [category]` for more info on a category. + + """ + command_name = self.invoked_with + return "Use `{0}{1} [command]` for more info on a command.\n" \ + "You can also use `{0}{1} [category]` for more info on a category.".format(self.clean_prefix, command_name) + + def get_command_signature(self, command): + return '%s%s %s' % (self.clean_prefix, command.qualified_name, command.signature) + + def get_ending_note(self): + """Return the help command's ending note. This is mainly useful to override for i18n purposes. + + The default implementation does nothing. + """ + return None + + def add_bot_commands_formatting(self, commands, heading): + """Adds the minified bot heading with commands to the output. + + The formatting should be added to the :attr:`paginator`. + + The default implementation is a bold underline heading followed + by commands separated by an EN SPACE (U+2002) in the next line. + + Parameters + ----------- + commands: Sequence[:class:`Command`] + A list of commands that belong to the heading. + heading: :class:`str` + The heading to add to the line. + """ + if commands: + # U+2002 Middle Dot + joined = '\u2002'.join(c.name for c in commands) + self.paginator.add_line('__**%s**__' % heading) + self.paginator.add_line(joined) + + def add_subcommand_formatting(self, command): + """Adds formatting information on a subcommand. + + The formatting should be added to the :attr:`paginator`. + + The default implementation is the prefix and the :attr:`Command.qualified_name` + optionally followed by an En dash and the command's :attr:`Command.short_doc`. + + Parameters + ----------- + command: :class:`Command` + The command to show information of. + """ + fmt = '{0}{1} \N{EN DASH} {2}' if command.short_doc else '{0}{1}' + self.paginator.add_line(fmt.format(self.clean_prefix, command.qualified_name, command.short_doc)) + + def add_aliases_formatting(self, aliases): + """Adds the formatting information on a command's aliases. + + The formatting should be added to the :attr:`paginator`. + + The default implementation is the :attr:`aliases_heading` bolded + followed by a comma separated list of aliases. + + This is not called if there are no aliases to format. + + Parameters + ----------- + aliases: Sequence[:class:`str`] + A list of aliases to format. 
+ """ + self.paginator.add_line('**%s** %s' % (self.aliases_heading, ', '.join(aliases)), empty=True) + + def add_command_formatting(self, command): + """A utility function to format commands and groups. + + Parameters + ------------ + command: :class:`Command` + The command to format. + """ + + if command.description: + self.paginator.add_line(command.description, empty=True) + + signature = self.get_command_signature(command) + if command.aliases: + self.paginator.add_line(signature) + self.add_aliases_formatting(command.aliases) + else: + self.paginator.add_line(signature, empty=True) + + if command.help: + try: + self.paginator.add_line(command.help, empty=True) + except RuntimeError: + for line in command.help.splitlines(): + self.paginator.add_line(line) + self.paginator.add_line() + + def get_destination(self): + ctx = self.context + if self.dm_help is True: + return ctx.author + elif self.dm_help is None and len(self.paginator) > self.dm_help_threshold: + return ctx.author + else: + return ctx.channel + + async def prepare_help_command(self, ctx, command): + self.paginator.clear() + await super().prepare_help_command(ctx, command) + + async def send_bot_help(self, mapping): + ctx = self.context + bot = ctx.bot + + if bot.description: + self.paginator.add_line(bot.description, empty=True) + + note = self.get_opening_note() + if note: + self.paginator.add_line(note, empty=True) + + no_category = '\u200b{0.no_category}'.format(self) + def get_category(command, *, no_category=no_category): + cog = command.cog + return cog.qualified_name if cog is not None else no_category + + filtered = await self.filter_commands(bot.commands, sort=True, key=get_category) + to_iterate = itertools.groupby(filtered, key=get_category) + + for category, commands in to_iterate: + commands = sorted(commands, key=lambda c: c.name) if self.sort_commands else list(commands) + self.add_bot_commands_formatting(commands, category) + + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + + async def send_cog_help(self, cog): + bot = self.context.bot + if bot.description: + self.paginator.add_line(bot.description, empty=True) + + note = self.get_opening_note() + if note: + self.paginator.add_line(note, empty=True) + + if cog.description: + self.paginator.add_line(cog.description, empty=True) + + filtered = await self.filter_commands(cog.get_commands(), sort=self.sort_commands) + if filtered: + self.paginator.add_line('**%s %s**' % (cog.qualified_name, self.commands_heading)) + for command in filtered: + self.add_subcommand_formatting(command) + + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + + async def send_group_help(self, group): + self.add_command_formatting(group) + + filtered = await self.filter_commands(group.commands, sort=self.sort_commands) + if filtered: + note = self.get_opening_note() + if note: + self.paginator.add_line(note, empty=True) + + self.paginator.add_line('**%s**' % self.commands_heading) + for command in filtered: + self.add_subcommand_formatting(command) + + note = self.get_ending_note() + if note: + self.paginator.add_line() + self.paginator.add_line(note) + + await self.send_pages() + + async def send_command_help(self, command): + self.add_command_formatting(command) + self.paginator.close_page() + await self.send_pages() diff --git a/venv/lib/python3.7/site-packages/discord/ext/commands/view.py 
b/venv/lib/python3.7/site-packages/discord/ext/commands/view.py new file mode 100644 index 0000000..91cc06b --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/commands/view.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .errors import UnexpectedQuoteError, InvalidEndOfQuotedStringError, ExpectedClosingQuoteError + +# map from opening quotes to closing quotes +_quotes = { + '"': '"', + "‘": "’", + "‚": "‛", + "“": "”", + "„": "‟", + "⹂": "⹂", + "「": "」", + "『": "』", + "〝": "〞", + "﹁": "﹂", + "﹃": "﹄", + """: """, + "「": "」", + "«": "»", + "‹": "›", + "《": "》", + "〈": "〉", +} +_all_quotes = set(_quotes.keys()) | set(_quotes.values()) + +class StringView: + def __init__(self, buffer): + self.index = 0 + self.buffer = buffer + self.end = len(buffer) + self.previous = 0 + + @property + def current(self): + return None if self.eof else self.buffer[self.index] + + @property + def eof(self): + return self.index >= self.end + + def undo(self): + self.index = self.previous + + def skip_ws(self): + pos = 0 + while not self.eof: + try: + current = self.buffer[self.index + pos] + if not current.isspace(): + break + pos += 1 + except IndexError: + break + + self.previous = self.index + self.index += pos + return self.previous != self.index + + def skip_string(self, string): + strlen = len(string) + if self.buffer[self.index:self.index + strlen] == string: + self.previous = self.index + self.index += strlen + return True + return False + + def read_rest(self): + result = self.buffer[self.index:] + self.previous = self.index + self.index = self.end + return result + + def read(self, n): + result = self.buffer[self.index:self.index + n] + self.previous = self.index + self.index += n + return result + + def get(self): + try: + result = self.buffer[self.index + 1] + except IndexError: + result = None + + self.previous = self.index + self.index += 1 + return result + + def get_word(self): + pos = 0 + while not self.eof: + try: + current = self.buffer[self.index + pos] + if current.isspace(): + break + pos += 1 + except IndexError: + break + self.previous = self.index + result = self.buffer[self.index:self.index + pos] + self.index += pos + return result + + def get_quoted_word(self): + current = self.current + if current is None: + return None + + close_quote = _quotes.get(current) + is_quoted = bool(close_quote) + if is_quoted: + result = [] + _escaped_quotes = (current, close_quote) + else: + 
result = [current] + _escaped_quotes = _all_quotes + + while not self.eof: + current = self.get() + if not current: + if is_quoted: + # unexpected EOF + raise ExpectedClosingQuoteError(close_quote) + return ''.join(result) + + # currently we accept strings in the format of "hello world" + # to embed a quote inside the string you must escape it: "a \"world\"" + if current == '\\': + next_char = self.get() + if not next_char: + # string ends with \ and no character after it + if is_quoted: + # if we're quoted then we're expecting a closing quote + raise ExpectedClosingQuoteError(close_quote) + # if we aren't then we just let it through + return ''.join(result) + + if next_char in _escaped_quotes: + # escaped quote + result.append(next_char) + else: + # different escape character, ignore it + self.undo() + result.append(current) + continue + + if not is_quoted and current in _all_quotes: + # we aren't quoted + raise UnexpectedQuoteError(current) + + # closing quote + if is_quoted and current == close_quote: + next_char = self.get() + valid_eof = not next_char or next_char.isspace() + if not valid_eof: + raise InvalidEndOfQuotedStringError(next_char) + + # we're quoted so it's okay + return ''.join(result) + + if current.isspace() and not is_quoted: + # end of word found + return ''.join(result) + + result.append(current) + + + def __repr__(self): + return ''.format(self) diff --git a/venv/lib/python3.7/site-packages/discord/ext/tasks/__init__.py b/venv/lib/python3.7/site-packages/discord/ext/tasks/__init__.py new file mode 100644 index 0000000..9ea0b1a --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/ext/tasks/__init__.py @@ -0,0 +1,387 @@ +import asyncio +import aiohttp +import websockets +import discord +import inspect +import logging + +from discord.backoff import ExponentialBackoff + +MAX_ASYNCIO_SECONDS = 3456000 + +log = logging.getLogger(__name__) + +class Loop: + """A background task helper that abstracts the loop and reconnection logic for you. + + The main interface to create this is through :func:`loop`. 
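Since the docstring above points to :func:`loop` as the main entry point, here is a minimal, hedged sketch of creating and driving a background task with it; the coroutine name and timings are illustrative.

```python
# Minimal sketch of the loop() entry point described above; the task name
# and interval are illustrative only.
import asyncio
from discord.ext import tasks

@tasks.loop(seconds=10.0)
async def heartbeat_check():
    print('tick')

async def main():
    heartbeat_check.start()      # schedules the internal asyncio.Task
    await asyncio.sleep(25)      # let a couple of iterations run
    heartbeat_check.cancel()     # or .stop() to finish the current pass first

asyncio.get_event_loop().run_until_complete(main())
```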
+ """ + def __init__(self, coro, seconds, hours, minutes, count, reconnect, loop): + self.coro = coro + self.reconnect = reconnect + self.loop = loop or asyncio.get_event_loop() + self.count = count + self._current_loop = 0 + self._task = None + self._injected = None + self._valid_exception = ( + OSError, + discord.HTTPException, + discord.GatewayNotFound, + discord.ConnectionClosed, + aiohttp.ClientError, + asyncio.TimeoutError, + websockets.InvalidHandshake, + websockets.WebSocketProtocolError, + ) + + self._before_loop = None + self._after_loop = None + self._is_being_cancelled = False + self._has_failed = False + self._stop_next_iteration = False + + if self.count is not None and self.count <= 0: + raise ValueError('count must be greater than 0 or None.') + + self.change_interval(seconds=seconds, minutes=minutes, hours=hours) + + if not inspect.iscoroutinefunction(self.coro): + raise TypeError('Expected coroutine function, not {0.__name__!r}.'.format(type(self.coro))) + + async def _call_loop_function(self, name): + coro = getattr(self, '_' + name) + if coro is None: + return + + if self._injected is not None: + await coro(self._injected) + else: + await coro() + + async def _loop(self, *args, **kwargs): + backoff = ExponentialBackoff() + await self._call_loop_function('before_loop') + try: + while True: + try: + await self.coro(*args, **kwargs) + except self._valid_exception as exc: + if not self.reconnect: + raise + await asyncio.sleep(backoff.delay()) + else: + if self._stop_next_iteration: + return + self._current_loop += 1 + if self._current_loop == self.count: + break + + await asyncio.sleep(self._sleep) + except asyncio.CancelledError: + self._is_being_cancelled = True + raise + except Exception: + self._has_failed = True + log.exception('Internal background task failed.') + raise + finally: + await self._call_loop_function('after_loop') + self._is_being_cancelled = False + self._current_loop = 0 + self._stop_next_iteration = False + self._has_failed = False + + def __get__(self, obj, objtype): + if obj is None: + return self + self._injected = obj + return self + + @property + def current_loop(self): + """:class:`int`: The current iteration of the loop.""" + return self._current_loop + + def start(self, *args, **kwargs): + r"""Starts the internal task in the event loop. + + Parameters + ------------ + \*args + The arguments to to use. + \*\*kwargs + The keyword arguments to use. + + Raises + -------- + RuntimeError + A task has already been launched and is running. + + Returns + --------- + :class:`asyncio.Task` + The task that has been created. + """ + + if self._task is not None and not self._task.done(): + raise RuntimeError('Task is already launched and is not completed.') + + if self._injected is not None: + args = (self._injected, *args) + + self._task = self.loop.create_task(self._loop(*args, **kwargs)) + return self._task + + def stop(self): + r"""Gracefully stops the task from running. + + Unlike :meth:`cancel`\, this allows the task to finish its + current iteration before gracefully exiting. + + .. note:: + + If the internal function raises an error that can be + handled before finishing then it will retry until + it succeeds. + + If this is undesirable, either remove the error handling + before stopping via :meth:`clear_exception_types` or + use :meth:`cancel` instead. + + .. 
versionadded:: 1.2.0 + """ + if self._task and not self._task.done(): + self._stop_next_iteration = True + + def _can_be_cancelled(self): + return not self._is_being_cancelled and self._task and not self._task.done() + + def cancel(self): + """Cancels the internal task, if it is running.""" + if self._can_be_cancelled(): + self._task.cancel() + + def restart(self, *args, **kwargs): + r"""A convenience method to restart the internal task. + + .. note:: + + Due to the way this function works, the task is not + returned like :meth:`start`. + + Parameters + ------------ + \*args + The arguments to to use. + \*\*kwargs + The keyword arguments to use. + """ + + def restart_when_over(fut, *, args=args, kwargs=kwargs): + self._task.remove_done_callback(restart_when_over) + self.start(*args, **kwargs) + + if self._can_be_cancelled(): + self._task.add_done_callback(restart_when_over) + self._task.cancel() + + def add_exception_type(self, exc): + r"""Adds an exception type to be handled during the reconnect logic. + + By default the exception types handled are those handled by + :meth:`discord.Client.connect`\, which includes a lot of internet disconnection + errors. + + This function is useful if you're interacting with a 3rd party library that + raises its own set of exceptions. + + Parameters + ------------ + exc: Type[:class:`BaseException`] + The exception class to handle. + + Raises + -------- + TypeError + The exception passed is either not a class or not inherited from :class:`BaseException`. + """ + + if not inspect.isclass(exc): + raise TypeError('{0!r} must be a class.'.format(exc)) + if not issubclass(exc, BaseException): + raise TypeError('{0!r} must inherit from BaseException.'.format(exc)) + + self._valid_exception = (*self._valid_exception, exc) + + def clear_exception_types(self): + """Removes all exception types that are handled. + + .. note:: + + This operation obviously cannot be undone! + """ + self._valid_exception = tuple() + + def remove_exception_type(self, exc): + """Removes an exception type from being handled during the reconnect logic. + + Parameters + ------------ + exc: Type[:class:`BaseException`] + The exception class to handle. + + Returns + --------- + :class:`bool` + Whether it was successfully removed. + """ + old_length = len(self._valid_exception) + self._valid_exception = tuple(x for x in self._valid_exception if x is not exc) + return len(self._valid_exception) != old_length + + def get_task(self): + """Optional[:class:`asyncio.Task`]: Fetches the internal task or ``None`` if there isn't one running.""" + return self._task + + def is_being_cancelled(self): + """Whether the task is being cancelled.""" + return self._is_being_cancelled + + def failed(self): + """:class:`bool`: Whether the internal task has failed. + + .. versionadded:: 1.2.0 + """ + return self._has_failed + + def before_loop(self, coro): + """A decorator that registers a coroutine to be called before the loop starts running. + + This is useful if you want to wait for some bot state before the loop starts, + such as :meth:`discord.Client.wait_until_ready`. + + The coroutine must take no arguments (except ``self`` in a class context). + + Parameters + ------------ + coro: :ref:`coroutine ` + The coroutine to register before the loop runs. + + Raises + ------- + TypeError + The function was not a coroutine. 
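A common use of the decorator documented here is delaying the first iteration until the client is ready; a hedged sketch follows (bot, prefix, and presence text are illustrative).

```python
# Sketch of before_loop as a readiness gate; names here are illustrative.
import discord
from discord.ext import commands, tasks

bot = commands.Bot(command_prefix='.')

@tasks.loop(minutes=5.0)
async def refresh_presence():
    await bot.change_presence(activity=discord.Game(name='with tasks'))

@refresh_presence.before_loop
async def before_refresh():
    await bot.wait_until_ready()   # don't hit the API before READY arrives
```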
+ """ + + if not inspect.iscoroutinefunction(coro): + raise TypeError('Expected coroutine function, received {0.__name__!r}.'.format(type(coro))) + + self._before_loop = coro + return coro + + def after_loop(self, coro): + """A decorator that register a coroutine to be called after the loop finished running. + + The coroutine must take no arguments (except ``self`` in a class context). + + .. note:: + + This coroutine is called even during cancellation. If it is desirable + to tell apart whether something was cancelled or not, check to see + whether :meth:`is_being_cancelled` is ``True`` or not. + + Parameters + ------------ + coro: :ref:`coroutine ` + The coroutine to register after the loop finishes. + + Raises + ------- + TypeError + The function was not a coroutine. + """ + + if not inspect.iscoroutinefunction(coro): + raise TypeError('Expected coroutine function, received {0.__name__!r}.'.format(type(coro))) + + self._after_loop = coro + return coro + + def change_interval(self, *, seconds=0, minutes=0, hours=0): + """Changes the interval for the sleep time. + + .. note:: + + This only applies on the next loop iteration. If it is desirable for the change of interval + to be applied right away, cancel the task with :meth:`cancel`. + + .. versionadded:: 1.2.0 + + Parameters + ------------ + seconds: :class:`float` + The number of seconds between every iteration. + minutes: :class:`float` + The number of minutes between every iteration. + hours: :class:`float` + The number of hours between every iteration. + + Raises + ------- + ValueError + An invalid value was given. + """ + + sleep = seconds + (minutes * 60.0) + (hours * 3600.0) + if sleep >= MAX_ASYNCIO_SECONDS: + fmt = 'Total number of seconds exceeds asyncio imposed limit of {0} seconds.' + raise ValueError(fmt.format(MAX_ASYNCIO_SECONDS)) + + if sleep < 0: + raise ValueError('Total number of seconds cannot be less than zero.') + + self._sleep = sleep + self.seconds = seconds + self.hours = hours + self.minutes = minutes + +def loop(*, seconds=0, minutes=0, hours=0, count=None, reconnect=True, loop=None): + """A decorator that schedules a task in the background for you with + optional reconnect logic. + + Parameters + ------------ + seconds: :class:`float` + The number of seconds between every iteration. + minutes: :class:`float` + The number of minutes between every iteration. + hours: :class:`float` + The number of hours between every iteration. + count: Optional[:class:`int`] + The number of loops to do, ``None`` if it should be an + infinite loop. + reconnect: :class:`bool` + Whether to handle errors and restart the task + using an exponential back-off algorithm similar to the + one used in :meth:`discord.Client.connect`. + loop: :class:`asyncio.AbstractEventLoop` + The loop to use to register the task, if not given + defaults to :func:`asyncio.get_event_loop`. + + Raises + -------- + ValueError + An invalid value was given. + TypeError + The function was not a coroutine. + + Returns + --------- + :class:`Loop` + The loop helper that handles the background task. 
+ """ + def decorator(func): + return Loop(func, seconds=seconds, minutes=minutes, hours=hours, + count=count, reconnect=reconnect, loop=loop) + return decorator diff --git a/venv/lib/python3.7/site-packages/discord/file.py b/venv/lib/python3.7/site-packages/discord/file.py new file mode 100644 index 0000000..8ba3a52 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/file.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import os.path +import io + +class File: + """A parameter object used for :meth:`abc.Messageable.send` + for sending file objects. + + Attributes + ----------- + fp: Union[:class:`str`, :class:`io.BufferedIOBase`] + A file-like object opened in binary mode and read mode + or a filename representing a file in the hard drive to + open. + + .. note:: + + If the file-like object passed is opened via ``open`` then the + modes 'rb' should be used. + + To pass binary data, consider usage of ``io.BytesIO``. + + filename: Optional[:class:`str`] + The filename to display when uploading to Discord. + If this is not given then it defaults to ``fp.name`` or if ``fp`` is + a string then the ``filename`` will default to the string given. + spoiler: :class:`bool` + Whether the attachment is a spoiler. 
+ """ + + __slots__ = ('fp', 'filename', '_original_pos', '_owner', '_closer') + + def __init__(self, fp, filename=None, *, spoiler=False): + self.fp = fp + + if isinstance(fp, io.IOBase): + if not (fp.seekable() and fp.readable()): + raise ValueError('File buffer {!r} must be seekable and readable'.format(fp)) + self.fp = fp + self._original_pos = fp.tell() + self._owner = False + else: + self.fp = open(fp, 'rb') + self._original_pos = 0 + self._owner = True + + # aiohttp only uses two methods from IOBase + # read and close, since I want to control when the files + # close, I need to stub it so it doesn't close unless + # I tell it to + self._closer = self.fp.close + self.fp.close = lambda: None + + if filename is None: + if isinstance(fp, str): + _, self.filename = os.path.split(fp) + else: + self.filename = getattr(fp, 'name', None) + else: + self.filename = filename + + if spoiler and self.filename is not None and not self.filename.startswith('SPOILER_'): + self.filename = 'SPOILER_' + self.filename + + def reset(self, *, seek=True): + # The `seek` parameter is needed because + # the retry-loop is iterated over multiple times + # starting from 0, as an implementation quirk + # the resetting must be done at the beginning + # before a request is done, since the first index + # is 0, and thus false, then this prevents an + # unnecessary seek since it's the first request + # done. + if seek: + self.fp.seek(self._original_pos) + + def close(self): + self.fp.close = self._closer + if self._owner: + self._closer() diff --git a/venv/lib/python3.7/site-packages/discord/gateway.py b/venv/lib/python3.7/site-packages/discord/gateway.py new file mode 100644 index 0000000..d3c794f --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/gateway.py @@ -0,0 +1,744 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +from collections import namedtuple +import concurrent.futures +import json +import logging +import struct +import sys +import time +import threading +import zlib + +import websockets + +from . 
import utils +from .activity import _ActivityTag +from .enums import SpeakingState +from .errors import ConnectionClosed, InvalidArgument + +log = logging.getLogger(__name__) + +__all__ = ( + 'DiscordWebSocket', + 'KeepAliveHandler', + 'VoiceKeepAliveHandler', + 'DiscordVoiceWebSocket', + 'ResumeWebSocket', +) + +class ResumeWebSocket(Exception): + """Signals to initialise via RESUME opcode instead of IDENTIFY.""" + def __init__(self, shard_id): + self.shard_id = shard_id + +EventListener = namedtuple('EventListener', 'predicate event result future') + +class KeepAliveHandler(threading.Thread): + def __init__(self, *args, **kwargs): + ws = kwargs.pop('ws', None) + interval = kwargs.pop('interval', None) + shard_id = kwargs.pop('shard_id', None) + threading.Thread.__init__(self, *args, **kwargs) + self.ws = ws + self.interval = interval + self.daemon = True + self.shard_id = shard_id + self.msg = 'Keeping websocket alive with sequence %s.' + self.block_msg = 'Heartbeat blocked for more than %s seconds.' + self.behind_msg = 'Can\'t keep up, websocket is %.1fs behind.' + self._stop_ev = threading.Event() + self._last_ack = time.perf_counter() + self._last_send = time.perf_counter() + self.latency = float('inf') + self.heartbeat_timeout = ws._max_heartbeat_timeout + + def run(self): + while not self._stop_ev.wait(self.interval): + if self._last_ack + self.heartbeat_timeout < time.perf_counter(): + log.warning("Shard ID %s has stopped responding to the gateway. Closing and restarting.", self.shard_id) + coro = self.ws.close(4000) + f = asyncio.run_coroutine_threadsafe(coro, loop=self.ws.loop) + + try: + f.result() + except Exception: + pass + finally: + self.stop() + return + + data = self.get_payload() + log.debug(self.msg, data['d']) + coro = self.ws.send_as_json(data) + f = asyncio.run_coroutine_threadsafe(coro, loop=self.ws.loop) + try: + # block until sending is complete + total = 0 + while True: + try: + f.result(5) + break + except concurrent.futures.TimeoutError: + total += 5 + log.warning(self.block_msg, total) + + except Exception: + self.stop() + else: + self._last_send = time.perf_counter() + + def get_payload(self): + return { + 'op': self.ws.HEARTBEAT, + 'd': self.ws.sequence + } + + def stop(self): + self._stop_ev.set() + + def ack(self): + ack_time = time.perf_counter() + self._last_ack = ack_time + self.latency = ack_time - self._last_send + if self.latency > 10: + log.warning(self.behind_msg, self.latency) + +class VoiceKeepAliveHandler(KeepAliveHandler): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.msg = 'Keeping voice websocket alive with timestamp %s.' + self.block_msg = 'Voice heartbeat blocked for more than %s seconds' + self.behind_msg = 'Can\'t keep up, voice websocket is %.1fs behind' + + def get_payload(self): + return { + 'op': self.ws.HEARTBEAT, + 'd': int(time.time() * 1000) + } + +class DiscordWebSocket(websockets.client.WebSocketClientProtocol): + """Implements a WebSocket for Discord's gateway v6. + + This is created through :func:`create_main_websocket`. Library + users should never create this manually. + + Attributes + ----------- + DISPATCH + Receive only. Denotes an event to be sent to Discord, such as READY. + HEARTBEAT + When received tells Discord to keep the connection alive. + When sent asks if your connection is currently alive. + IDENTIFY + Send only. Starts a new session. + PRESENCE + Send only. Updates your presence. + VOICE_STATE + Send only. Starts a new connection to a voice guild. + VOICE_PING + Send only. 
Checks ping time to a voice guild, do not use. + RESUME + Send only. Resumes an existing connection. + RECONNECT + Receive only. Tells the client to reconnect to a new gateway. + REQUEST_MEMBERS + Send only. Asks for the full member list of a guild. + INVALIDATE_SESSION + Receive only. Tells the client to optionally invalidate the session + and IDENTIFY again. + HELLO + Receive only. Tells the client the heartbeat interval. + HEARTBEAT_ACK + Receive only. Confirms receiving of a heartbeat. Not having it implies + a connection issue. + GUILD_SYNC + Send only. Requests a guild sync. + gateway + The gateway we are currently connected to. + token + The authentication token for discord. + """ + + DISPATCH = 0 + HEARTBEAT = 1 + IDENTIFY = 2 + PRESENCE = 3 + VOICE_STATE = 4 + VOICE_PING = 5 + RESUME = 6 + RECONNECT = 7 + REQUEST_MEMBERS = 8 + INVALIDATE_SESSION = 9 + HELLO = 10 + HEARTBEAT_ACK = 11 + GUILD_SYNC = 12 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_size = None + # an empty dispatcher to prevent crashes + self._dispatch = lambda *args: None + # generic event listeners + self._dispatch_listeners = [] + # the keep alive + self._keep_alive = None + + # ws related stuff + self.session_id = None + self.sequence = None + self._zlib = zlib.decompressobj() + self._buffer = bytearray() + + @classmethod + async def from_client(cls, client, *, shard_id=None, session=None, sequence=None, resume=False): + """Creates a main websocket for Discord from a :class:`Client`. + + This is for internal use only. + """ + gateway = await client.http.get_gateway() + ws = await websockets.connect(gateway, loop=client.loop, klass=cls, compression=None) + + # dynamically add attributes needed + ws.token = client.http.token + ws._connection = client._connection + ws._discord_parsers = client._connection.parsers + ws._dispatch = client.dispatch + ws.gateway = gateway + ws.shard_id = shard_id + ws.shard_count = client._connection.shard_count + ws.session_id = session + ws.sequence = sequence + ws._max_heartbeat_timeout = client._connection.heartbeat_timeout + + client._connection._update_references(ws) + + log.info('Created websocket connected to %s', gateway) + + # poll event for OP Hello + await ws.poll_event() + + if not resume: + await ws.identify() + return ws + + await ws.resume() + try: + await ws.ensure_open() + except websockets.exceptions.ConnectionClosed: + # ws got closed so let's just do a regular IDENTIFY connect. + log.info('RESUME failed (the websocket decided to close) for Shard ID %s. Retrying.', shard_id) + return await cls.from_client(client, shard_id=shard_id) + else: + return ws + + def wait_for(self, event, predicate, result=None): + """Waits for a DISPATCH'd event that meets the predicate. + + Parameters + ----------- + event: :class:`str` + The event name in all upper case to wait for. + predicate + A function that takes a data parameter to check for event + properties. The data parameter is the 'd' key in the JSON message. + result + A function that takes the same data parameter and executes to send + the result to the future. If ``None``, returns the data. + + Returns + -------- + asyncio.Future + A future to wait for. 
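The listener bookkeeping described above (a predicate plus a future per waiter) is easiest to see in isolation; the sketch below is a standalone illustration of that pattern, not discord.py API.

```python
# Standalone sketch of the predicate-plus-future dispatch pattern; this is
# illustrative only and not part of discord.py.
import asyncio
from collections import namedtuple

Listener = namedtuple('Listener', 'predicate future')

class MiniDispatcher:
    def __init__(self):
        self._listeners = []

    def wait_for(self, predicate):
        future = asyncio.get_event_loop().create_future()
        self._listeners.append(Listener(predicate, future))
        return future

    def dispatch(self, data):
        still_waiting = []
        for listener in self._listeners:
            if listener.future.done():
                continue                      # drop cancelled waiters
            if listener.predicate(data):
                listener.future.set_result(data)
            else:
                still_waiting.append(listener)
        self._listeners = still_waiting

async def demo():
    d = MiniDispatcher()
    waiter = d.wait_for(lambda payload: payload.get('id') == 42)
    d.dispatch({'id': 1})     # predicate fails, waiter stays registered
    d.dispatch({'id': 42})    # predicate passes, future is resolved
    print(await waiter)       # -> {'id': 42}

asyncio.get_event_loop().run_until_complete(demo())
```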
+ """ + + future = self.loop.create_future() + entry = EventListener(event=event, predicate=predicate, result=result, future=future) + self._dispatch_listeners.append(entry) + return future + + async def identify(self): + """Sends the IDENTIFY packet.""" + payload = { + 'op': self.IDENTIFY, + 'd': { + 'token': self.token, + 'properties': { + '$os': sys.platform, + '$browser': 'discord.py', + '$device': 'discord.py', + '$referrer': '', + '$referring_domain': '' + }, + 'compress': True, + 'large_threshold': 250, + 'v': 3 + } + } + + if not self._connection.is_bot: + payload['d']['synced_guilds'] = [] + + if self.shard_id is not None and self.shard_count is not None: + payload['d']['shard'] = [self.shard_id, self.shard_count] + + state = self._connection + if state._activity is not None or state._status is not None: + payload['d']['presence'] = { + 'status': state._status, + 'game': state._activity, + 'since': 0, + 'afk': False + } + + await self.send_as_json(payload) + log.info('Shard ID %s has sent the IDENTIFY payload.', self.shard_id) + + async def resume(self): + """Sends the RESUME packet.""" + payload = { + 'op': self.RESUME, + 'd': { + 'seq': self.sequence, + 'session_id': self.session_id, + 'token': self.token + } + } + + await self.send_as_json(payload) + log.info('Shard ID %s has sent the RESUME payload.', self.shard_id) + + async def received_message(self, msg): + self._dispatch('socket_raw_receive', msg) + + if type(msg) is bytes: + self._buffer.extend(msg) + + if len(msg) >= 4: + if msg[-4:] == b'\x00\x00\xff\xff': + msg = self._zlib.decompress(self._buffer) + msg = msg.decode('utf-8') + self._buffer = bytearray() + else: + return + else: + return + + msg = json.loads(msg) + + log.debug('For Shard ID %s: WebSocket Event: %s', self.shard_id, msg) + self._dispatch('socket_response', msg) + + op = msg.get('op') + data = msg.get('d') + seq = msg.get('s') + if seq is not None: + self.sequence = seq + + if op != self.DISPATCH: + if op == self.RECONNECT: + # "reconnect" can only be handled by the Client + # so we terminate our connection and raise an + # internal exception signalling to reconnect. 
+ log.info('Received RECONNECT opcode.') + await self.close() + raise ResumeWebSocket(self.shard_id) + + if op == self.HEARTBEAT_ACK: + self._keep_alive.ack() + return + + if op == self.HEARTBEAT: + beat = self._keep_alive.get_payload() + await self.send_as_json(beat) + return + + if op == self.HELLO: + interval = data['heartbeat_interval'] / 1000.0 + self._keep_alive = KeepAliveHandler(ws=self, interval=interval, shard_id=self.shard_id) + # send a heartbeat immediately + await self.send_as_json(self._keep_alive.get_payload()) + self._keep_alive.start() + return + + if op == self.INVALIDATE_SESSION: + if data is True: + await asyncio.sleep(5.0, loop=self.loop) + await self.close() + raise ResumeWebSocket(self.shard_id) + + self.sequence = None + self.session_id = None + log.info('Shard ID %s session has been invalidated.', self.shard_id) + await self.identify() + return + + log.warning('Unknown OP code %s.', op) + return + + event = msg.get('t') + + if event == 'READY': + self._trace = trace = data.get('_trace', []) + self.sequence = msg['s'] + self.session_id = data['session_id'] + log.info('Shard ID %s has connected to Gateway: %s (Session ID: %s).', + self.shard_id, ', '.join(trace), self.session_id) + + elif event == 'RESUMED': + self._trace = trace = data.get('_trace', []) + log.info('Shard ID %s has successfully RESUMED session %s under trace %s.', + self.shard_id, self.session_id, ', '.join(trace)) + + try: + func = self._discord_parsers[event] + except KeyError: + log.warning('Unknown event %s.', event) + else: + func(data) + + # remove the dispatched listeners + removed = [] + for index, entry in enumerate(self._dispatch_listeners): + if entry.event != event: + continue + + future = entry.future + if future.cancelled(): + removed.append(index) + continue + + try: + valid = entry.predicate(data) + except Exception as exc: + future.set_exception(exc) + removed.append(index) + else: + if valid: + ret = data if entry.result is None else entry.result(data) + future.set_result(ret) + removed.append(index) + + for index in reversed(removed): + del self._dispatch_listeners[index] + + @property + def latency(self): + """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.""" + heartbeat = self._keep_alive + return float('inf') if heartbeat is None else heartbeat.latency + + def _can_handle_close(self, code): + return code not in (1000, 4004, 4010, 4011) + + async def poll_event(self): + """Polls for a DISPATCH event and handles the general gateway loop. + + Raises + ------ + ConnectionClosed + The websocket connection was terminated for unhandled reasons. 
+ """ + try: + msg = await self.recv() + await self.received_message(msg) + except websockets.exceptions.ConnectionClosed as exc: + if self._can_handle_close(exc.code): + log.info('Websocket closed with %s (%s), attempting a reconnect.', exc.code, exc.reason) + raise ResumeWebSocket(self.shard_id) from exc + else: + log.info('Websocket closed with %s (%s), cannot reconnect.', exc.code, exc.reason) + raise ConnectionClosed(exc, shard_id=self.shard_id) from exc + + async def send(self, data): + self._dispatch('socket_raw_send', data) + await super().send(data) + + async def send_as_json(self, data): + try: + await self.send(utils.to_json(data)) + except websockets.exceptions.ConnectionClosed as exc: + if not self._can_handle_close(exc.code): + raise ConnectionClosed(exc, shard_id=self.shard_id) from exc + + async def change_presence(self, *, activity=None, status=None, afk=False, since=0.0): + if activity is not None: + if not isinstance(activity, _ActivityTag): + raise InvalidArgument('activity must be one of Game, Streaming, or Activity.') + activity = activity.to_dict() + + if status == 'idle': + since = int(time.time() * 1000) + + payload = { + 'op': self.PRESENCE, + 'd': { + 'game': activity, + 'afk': afk, + 'since': since, + 'status': status + } + } + + sent = utils.to_json(payload) + log.debug('Sending "%s" to change status', sent) + await self.send(sent) + + async def request_sync(self, guild_ids): + payload = { + 'op': self.GUILD_SYNC, + 'd': list(guild_ids) + } + await self.send_as_json(payload) + + async def voice_state(self, guild_id, channel_id, self_mute=False, self_deaf=False): + payload = { + 'op': self.VOICE_STATE, + 'd': { + 'guild_id': guild_id, + 'channel_id': channel_id, + 'self_mute': self_mute, + 'self_deaf': self_deaf + } + } + + log.debug('Updating our voice state to %s.', payload) + await self.send_as_json(payload) + + async def close(self, code=1000, reason=''): + if self._keep_alive: + self._keep_alive.stop() + + await super().close(code, reason) + + async def close_connection(self, *args, **kwargs): + if self._keep_alive: + self._keep_alive.stop() + + await super().close_connection(*args, **kwargs) + +class DiscordVoiceWebSocket(websockets.client.WebSocketClientProtocol): + """Implements the websocket protocol for handling voice connections. + + Attributes + ----------- + IDENTIFY + Send only. Starts a new voice session. + SELECT_PROTOCOL + Send only. Tells discord what encryption mode and how to connect for voice. + READY + Receive only. Tells the websocket that the initial connection has completed. + HEARTBEAT + Send only. Keeps your websocket connection alive. + SESSION_DESCRIPTION + Receive only. Gives you the secret key required for voice. + SPEAKING + Send only. Notifies the client if you are currently speaking. + HEARTBEAT_ACK + Receive only. Tells you your heartbeat has been acknowledged. + RESUME + Sent only. Tells the client to resume its session. + HELLO + Receive only. Tells you that your websocket connection was acknowledged. + INVALIDATE_SESSION + Sent only. Tells you that your RESUME request has failed and to re-IDENTIFY. + CLIENT_CONNECT + Indicates a user has connected to voice. + CLIENT_DISCONNECT + Receive only. Indicates a user has disconnected from voice. 
+ """ + + IDENTIFY = 0 + SELECT_PROTOCOL = 1 + READY = 2 + HEARTBEAT = 3 + SESSION_DESCRIPTION = 4 + SPEAKING = 5 + HEARTBEAT_ACK = 6 + RESUME = 7 + HELLO = 8 + INVALIDATE_SESSION = 9 + CLIENT_CONNECT = 12 + CLIENT_DISCONNECT = 13 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_size = None + self._keep_alive = None + + async def send_as_json(self, data): + log.debug('Sending voice websocket frame: %s.', data) + await self.send(utils.to_json(data)) + + async def resume(self): + state = self._connection + payload = { + 'op': self.RESUME, + 'd': { + 'token': state.token, + 'server_id': str(state.server_id), + 'session_id': state.session_id + } + } + await self.send_as_json(payload) + + async def identify(self): + state = self._connection + payload = { + 'op': self.IDENTIFY, + 'd': { + 'server_id': str(state.server_id), + 'user_id': str(state.user.id), + 'session_id': state.session_id, + 'token': state.token + } + } + await self.send_as_json(payload) + + @classmethod + async def from_client(cls, client, *, resume=False): + """Creates a voice websocket for the :class:`VoiceClient`.""" + gateway = 'wss://' + client.endpoint + '/?v=4' + ws = await websockets.connect(gateway, loop=client.loop, klass=cls, compression=None) + ws.gateway = gateway + ws._connection = client + ws._max_heartbeat_timeout = 60.0 + + if resume: + await ws.resume() + else: + await ws.identify() + + return ws + + async def select_protocol(self, ip, port, mode): + payload = { + 'op': self.SELECT_PROTOCOL, + 'd': { + 'protocol': 'udp', + 'data': { + 'address': ip, + 'port': port, + 'mode': mode + } + } + } + + await self.send_as_json(payload) + + async def client_connect(self): + payload = { + 'op': self.CLIENT_CONNECT, + 'd': { + 'audio_ssrc': self._connection.ssrc + } + } + + await self.send_as_json(payload) + + async def speak(self, state=SpeakingState.voice): + payload = { + 'op': self.SPEAKING, + 'd': { + 'speaking': int(state), + 'delay': 0 + } + } + + await self.send_as_json(payload) + + async def received_message(self, msg): + log.debug('Voice websocket frame received: %s', msg) + op = msg['op'] + data = msg.get('d') + + if op == self.READY: + await self.initial_connection(data) + elif op == self.HEARTBEAT_ACK: + self._keep_alive.ack() + elif op == self.INVALIDATE_SESSION: + log.info('Voice RESUME failed.') + await self.identify() + elif op == self.SESSION_DESCRIPTION: + self._connection.mode = data['mode'] + await self.load_secret_key(data) + elif op == self.HELLO: + interval = data['heartbeat_interval'] / 1000.0 + self._keep_alive = VoiceKeepAliveHandler(ws=self, interval=interval) + self._keep_alive.start() + + async def initial_connection(self, data): + state = self._connection + state.ssrc = data['ssrc'] + state.voice_port = data['port'] + state.endpoint_ip = data['ip'] + + packet = bytearray(70) + struct.pack_into('>I', packet, 0, state.ssrc) + state.socket.sendto(packet, (state.endpoint_ip, state.voice_port)) + recv = await self.loop.sock_recv(state.socket, 70) + log.debug('received packet in initial_connection: %s', recv) + + # the ip is ascii starting at the 4th byte and ending at the first null + ip_start = 4 + ip_end = recv.index(0, ip_start) + state.ip = recv[ip_start:ip_end].decode('ascii') + + # the port is a little endian unsigned short in the last two bytes + # yes, this is different endianness from everything else + state.port = struct.unpack_from('' % self.value + + def __iter__(self): + for name, value in self.__class__.__dict__.items(): + if isinstance(value, 
_flag_descriptor): + yield (name, self._has_flag(value.flag)) + + # For some reason the flags in the Discord API are "inverted" + # ergo, if they're set then it means "suppress" (off in the GUI toggle) + # Since this is counter-intuitive from an API perspective and annoying + # these will be inverted automatically + + def _has_flag(self, o): + return (self.value & o) != o + + def _set_flag(self, o, toggle): + if toggle is True: + self.value &= ~o + elif toggle is False: + self.value |= o + else: + raise TypeError('Value to set for SystemChannelFlags must be a bool.') + + @_flag_descriptor + def join_notifications(self): + """:class:`bool`: Returns True if the system channel is used for member join notifications.""" + return 1 + + @_flag_descriptor + def premium_subscriptions(self): + """:class:`bool`: Returns True if the system channel is used for Nitro boosting notifications.""" + return 2 + + +class Guild(Hashable): + """Represents a Discord guild. + + This is referred to as a "server" in the official Discord UI. + + .. container:: operations + + .. describe:: x == y + + Checks if two guilds are equal. + + .. describe:: x != y + + Checks if two guilds are not equal. + + .. describe:: hash(x) + + Returns the guild's hash. + + .. describe:: str(x) + + Returns the guild's name. + + Attributes + ---------- + name: :class:`str` + The guild name. + emojis: Tuple[:class:`Emoji`, ...] + All emojis that the guild owns. + region: :class:`VoiceRegion` + The region the guild belongs on. There is a chance that the region + will be a :class:`str` if the value is not recognised by the enumerator. + afk_timeout: :class:`int` + The timeout to get sent to the AFK channel. + afk_channel: Optional[:class:`VoiceChannel`] + The channel that denotes the AFK channel. None if it doesn't exist. + icon: Optional[:class:`str`] + The guild's icon. + id: :class:`int` + The guild's ID. + owner_id: :class:`int` + The guild owner's ID. Use :attr:`Guild.owner` instead. + unavailable: :class:`bool` + Indicates if the guild is unavailable. If this is ``True`` then the + reliability of other attributes outside of :meth:`Guild.id` is slim and they might + all be None. It is best to not do anything with the guild if it is unavailable. + + Check the :func:`on_guild_unavailable` and :func:`on_guild_available` events. + max_presences: Optional[:class:`int`] + The maximum amount of presences for the guild. + max_members: Optional[:class:`int`] + The maximum amount of members for the guild. + banner: Optional[:class:`str`] + The guild's banner. + description: Optional[:class:`str`] + The guild's description. + mfa_level: :class:`int` + Indicates the guild's two factor authorisation level. If this value is 0 then + the guild does not require 2FA for their administrative members. If the value is + 1 then they do. + verification_level: :class:`VerificationLevel` + The guild's verification level. + explicit_content_filter: :class:`ContentFilter` + The guild's explicit content filter. + default_notifications: :class:`NotificationLevel` + The guild's notification settings. + features: List[:class:`str`] + A list of features that the guild has. They are currently as follows: + + - ``VIP_REGIONS``: Guild has VIP voice regions + - ``VANITY_URL``: Guild can have a vanity invite URL (e.g. discord.gg/discord-api) + - ``INVITE_SPLASH``: Guild's invite page can have a special splash. + - ``VERIFIED``: Guild is a verified server. + - ``PARTNERED``: Guild is a partnered server. 
+ - ``MORE_EMOJI``: Guild is allowed to have more than 50 custom emoji. + - ``DISCOVERABLE``: Guild shows up in Server Discovery. + - ``COMMERCE``: Guild can sell things using store channels. + - ``LURKABLE``: Users can lurk in this guild via Server Discovery. + - ``NEWS``: Guild can create news channels. + - ``BANNER``: Guild can upload and use a banner (i.e. :meth:`banner_url`). + - ``ANIMATED_ICON``: Guild can upload an animated icon. + + splash: Optional[:class:`str`] + The guild's invite splash. + premium_tier: :class:`int` + The premium tier for this guild. Corresponds to "Nitro Server" in the official UI. + The number goes from 0 to 3 inclusive. + premium_subscription_count: :class:`int` + How many users have currently "boosted" this guild. + """ + + __slots__ = ('afk_timeout', 'afk_channel', '_members', '_channels', 'icon', + 'name', 'id', 'unavailable', 'banner', 'region', '_state', + '_default_role', '_roles', '_member_count', '_large', + 'owner_id', 'mfa_level', 'emojis', 'features', + 'verification_level', 'explicit_content_filter', 'splash', + '_voice_states', '_system_channel_id', 'default_notifications', + 'description', 'max_presences', 'max_members', 'premium_tier', + 'premium_subscription_count', '_system_channel_flags') + + _PREMIUM_GUILD_LIMITS = { + None: _GuildLimit(emoji=50, bitrate=96e3, filesize=8388608), + 0: _GuildLimit(emoji=50, bitrate=96e3, filesize=8388608), + 1: _GuildLimit(emoji=100, bitrate=128e3, filesize=8388608), + 2: _GuildLimit(emoji=150, bitrate=256e3, filesize=52428800), + 3: _GuildLimit(emoji=250, bitrate=384e3, filesize=104857600), + } + + def __init__(self, *, data, state): + self._channels = {} + self._members = {} + self._voice_states = {} + self._state = state + self._from_data(data) + + def _add_channel(self, channel): + self._channels[channel.id] = channel + + def _remove_channel(self, channel): + self._channels.pop(channel.id, None) + + def _voice_state_for(self, user_id): + return self._voice_states.get(user_id) + + def _add_member(self, member): + self._members[member.id] = member + + def _remove_member(self, member): + self._members.pop(member.id, None) + + def __str__(self): + return self.name + + def __repr__(self): + attrs = ( + 'id', 'name', 'shard_id', 'chunked' + ) + resolved = ['%s=%r' % (attr, getattr(self, attr)) for attr in attrs] + resolved.append('member_count=%r' % getattr(self, '_member_count', None)) + return '' % ' '.join(resolved) + + def _update_voice_state(self, data, channel_id): + user_id = int(data['user_id']) + channel = self.get_channel(channel_id) + try: + # check if we should remove the voice state from cache + if channel is None: + after = self._voice_states.pop(user_id) + else: + after = self._voice_states[user_id] + + before = copy.copy(after) + after._update(data, channel) + except KeyError: + # if we're here then we're getting added into the cache + after = VoiceState(data=data, channel=channel) + before = VoiceState(data=data, channel=None) + self._voice_states[user_id] = after + + member = self.get_member(user_id) + return member, before, after + + def _add_role(self, role): + # roles get added to the bottom (position 1, pos 0 is @everyone) + # so since self.roles has the @everyone role, we can't increment + # its position because it's stuck at position 0. Luckily x += False + # is equivalent to adding 0. So we cast the position to a bool and + # increment it. 
+ for r in self._roles.values(): + r.position += (not r.is_default()) + + self._roles[role.id] = role + + def _remove_role(self, role_id): + # this raises KeyError if it fails.. + role = self._roles.pop(role_id) + + # since it didn't, we can change the positions now + # basically the same as above except we only decrement + # the position if we're above the role we deleted. + for r in self._roles.values(): + r.position -= r.position > role.position + + return role + + def _from_data(self, guild): + # according to Stan, this is always available even if the guild is unavailable + # I don't have this guarantee when someone updates the guild. + member_count = guild.get('member_count', None) + if member_count: + self._member_count = member_count + + self.name = guild.get('name') + self.region = try_enum(VoiceRegion, guild.get('region')) + self.verification_level = try_enum(VerificationLevel, guild.get('verification_level')) + self.default_notifications = try_enum(NotificationLevel, guild.get('default_message_notifications')) + self.explicit_content_filter = try_enum(ContentFilter, guild.get('explicit_content_filter', 0)) + self.afk_timeout = guild.get('afk_timeout') + self.icon = guild.get('icon') + self.banner = guild.get('banner') + self.unavailable = guild.get('unavailable', False) + self.id = int(guild['id']) + self._roles = {} + state = self._state # speed up attribute access + for r in guild.get('roles', []): + role = Role(guild=self, data=r, state=state) + self._roles[role.id] = role + + self.mfa_level = guild.get('mfa_level') + self.emojis = tuple(map(lambda d: state.store_emoji(self, d), guild.get('emojis', []))) + self.features = guild.get('features', []) + self.splash = guild.get('splash') + self._system_channel_id = utils._get_as_snowflake(guild, 'system_channel_id') + self.description = guild.get('description') + self.max_presences = guild.get('max_presences') + self.max_members = guild.get('max_members') + self.premium_tier = guild.get('premium_tier', 0) + self.premium_subscription_count = guild.get('premium_subscription_count') or 0 + self._system_channel_flags = guild.get('system_channel_flags', 0) + + for mdata in guild.get('members', []): + member = Member(data=mdata, guild=self, state=state) + self._add_member(member) + + self._sync(guild) + self._large = None if member_count is None else self._member_count >= 250 + + self.owner_id = utils._get_as_snowflake(guild, 'owner_id') + self.afk_channel = self.get_channel(utils._get_as_snowflake(guild, 'afk_channel_id')) + + for obj in guild.get('voice_states', []): + self._update_voice_state(obj, int(obj['channel_id'])) + + def _sync(self, data): + try: + self._large = data['large'] + except KeyError: + pass + + empty_tuple = tuple() + for presence in data.get('presences', []): + user_id = int(presence['user']['id']) + member = self.get_member(user_id) + if member is not None: + member._presence_update(presence, empty_tuple) + + if 'channels' in data: + channels = data['channels'] + for c in channels: + c_type = c['type'] + if c_type in (ChannelType.text.value, ChannelType.news.value): + self._add_channel(TextChannel(guild=self, data=c, state=self._state)) + elif c_type == ChannelType.voice.value: + self._add_channel(VoiceChannel(guild=self, data=c, state=self._state)) + elif c_type == ChannelType.category.value: + self._add_channel(CategoryChannel(guild=self, data=c, state=self._state)) + elif c_type == ChannelType.store.value: + self._add_channel(StoreChannel(guild=self, data=c, state=self._state)) + + @property + def 
channels(self): + """List[:class:`abc.GuildChannel`]: A list of channels that belongs to this guild.""" + return list(self._channels.values()) + + @property + def large(self): + """:class:`bool`: Indicates if the guild is a 'large' guild. + + A large guild is defined as having more than ``large_threshold`` count + members, which for this library is set to the maximum of 250. + """ + if self._large is None: + try: + return self._member_count >= 250 + except AttributeError: + return len(self._members) >= 250 + return self._large + + @property + def voice_channels(self): + """List[:class:`VoiceChannel`]: A list of voice channels that belongs to this guild. + + This is sorted by the position and are in UI order from top to bottom. + """ + r = [ch for ch in self._channels.values() if isinstance(ch, VoiceChannel)] + r.sort(key=lambda c: (c.position, c.id)) + return r + + @property + def me(self): + """Similar to :attr:`Client.user` except an instance of :class:`Member`. + This is essentially used to get the member version of yourself. + """ + self_id = self._state.user.id + return self.get_member(self_id) + + @property + def voice_client(self): + """Returns the :class:`VoiceClient` associated with this guild, if any.""" + return self._state._get_voice_client(self.id) + + @property + def text_channels(self): + """List[:class:`TextChannel`]: A list of text channels that belongs to this guild. + + This is sorted by the position and are in UI order from top to bottom. + """ + r = [ch for ch in self._channels.values() if isinstance(ch, TextChannel)] + r.sort(key=lambda c: (c.position, c.id)) + return r + + @property + def categories(self): + """List[:class:`CategoryChannel`]: A list of categories that belongs to this guild. + + This is sorted by the position and are in UI order from top to bottom. + """ + r = [ch for ch in self._channels.values() if isinstance(ch, CategoryChannel)] + r.sort(key=lambda c: (c.position, c.id)) + return r + + def by_category(self): + """Returns every :class:`CategoryChannel` and their associated channels. + + These channels and categories are sorted in the official Discord UI order. + + If the channels do not have a category, then the first element of the tuple is + ``None``. + + Returns + -------- + List[Tuple[Optional[:class:`CategoryChannel`], List[:class:`abc.GuildChannel`]]]: + The categories and their associated channels. + """ + grouped = defaultdict(list) + for channel in self._channels.values(): + if isinstance(channel, CategoryChannel): + continue + + grouped[channel.category_id].append(channel) + + def key(t): + k, v = t + return ((k.position, k.id) if k else (-1, -1), v) + + _get = self._channels.get + as_list = [(_get(k), v) for k, v in grouped.items()] + as_list.sort(key=key) + for _, channels in as_list: + channels.sort(key=lambda c: (c._sorting_bucket, c.position, c.id)) + return as_list + + def get_channel(self, channel_id): + """Returns a :class:`abc.GuildChannel` with the given ID. If not found, returns None.""" + return self._channels.get(channel_id) + + @property + def system_channel(self): + """Optional[:class:`TextChannel`]: Returns the guild's channel used for system messages. + + If no channel is set, then this returns ``None``. 
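The category and channel accessors above compose nicely; a hedged sketch of printing a guild's layout in client order (bot, prefix, and command name are illustrative):

```python
# Sketch of by_category(); the bot, prefix, and command name are illustrative.
import discord
from discord.ext import commands

bot = commands.Bot(command_prefix='.')

@bot.command()
@commands.guild_only()
async def layout(ctx: commands.Context):
    for category, channels in ctx.guild.by_category():
        header = category.name if category else 'No category'
        names = ', '.join(channel.name for channel in channels)
        await ctx.send(f'{header}: {names}')
```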
+ """ + channel_id = self._system_channel_id + return channel_id and self._channels.get(channel_id) + + @property + def system_channel_flags(self): + """:class:`SystemChannelFlags`: Returns the guild's system channel settings.""" + return SystemChannelFlags._from_value(self._system_channel_flags) + + @property + def emoji_limit(self): + """:class:`int`: The maximum number of emoji slots this guild has.""" + more_emoji = 200 if 'MORE_EMOJI' in self.features else 50 + return max(more_emoji, self._PREMIUM_GUILD_LIMITS[self.premium_tier].emoji) + + @property + def bitrate_limit(self): + """:class:`float`: The maximum bitrate for voice channels this guild can have.""" + vip_guild = self._PREMIUM_GUILD_LIMITS[1].bitrate if 'VIP_REGIONS' in self.features else 96e3 + return max(vip_guild, self._PREMIUM_GUILD_LIMITS[self.premium_tier].bitrate) + + @property + def filesize_limit(self): + """:class:`int`: The maximum number of bytes files can have when uploaded to this guild.""" + return self._PREMIUM_GUILD_LIMITS[self.premium_tier].filesize + + @property + def members(self): + """List[:class:`Member`]: A list of members that belong to this guild.""" + return list(self._members.values()) + + def get_member(self, user_id): + """Returns a :class:`Member` with the given ID. If not found, returns None.""" + return self._members.get(user_id) + + @property + def premium_subscribers(self): + """List[:class:`Member`]: A list of members who have "boosted" this guild.""" + return [member for member in self.members if member.premium_since is not None] + + @property + def roles(self): + """Returns a :class:`list` of the guild's roles in hierarchy order. + + The first element of this list will be the lowest role in the + hierarchy. + """ + return sorted(self._roles.values()) + + def get_role(self, role_id): + """Returns a :class:`Role` with the given ID. If not found, returns None.""" + return self._roles.get(role_id) + + @utils.cached_slot_property('_default_role') + def default_role(self): + """Gets the @everyone role that all members have by default.""" + return utils.find(lambda r: r.is_default(), self._roles.values()) + + @property + def owner(self): + """:class:`Member`: The member that owns the guild.""" + return self.get_member(self.owner_id) + + @property + def icon_url(self): + """:class:`Asset`: Returns the guild's icon asset.""" + return self.icon_url_as() + + def is_icon_animated(self): + """:class:`bool`: Returns True if the guild has an animated icon.""" + return bool(self.icon and self.icon.startswith('a_')) + + def icon_url_as(self, *, format=None, static_format='webp', size=1024): + """Returns an :class:`Asset` for the guild's icon. + + The format must be one of 'webp', 'jpeg', 'jpg', 'png' or 'gif', and + 'gif' is only valid for animated avatars. The size must be a power of 2 + between 16 and 4096. + + Parameters + ----------- + format: Optional[:class:`str`] + The format to attempt to convert the icon to. + If the format is ``None``, then it is automatically + detected into either 'gif' or static_format depending on the + icon being animated or not. + static_format: Optional[:class:`str`] + Format to attempt to convert only non-animated icons to. + size: :class:`int` + The size of the image to display. + + Raises + ------ + InvalidArgument + Bad image format passed to ``format`` or invalid ``size``. + + Returns + -------- + :class:`Asset` + The resulting CDN asset. 
+ """ + return Asset._from_guild_icon(self._state, self, format=format, static_format=static_format, size=size) + + @property + def banner_url(self): + """:class:`Asset`: Returns the guild's banner asset.""" + return self.banner_url_as() + + def banner_url_as(self, *, format='webp', size=2048): + """Returns an :class:`Asset` for the guild's banner. + + The format must be one of 'webp', 'jpeg', or 'png'. The + size must be a power of 2 between 16 and 4096. + + Parameters + ----------- + format: :class:`str` + The format to attempt to convert the banner to. + size: :class:`int` + The size of the image to display. + + Raises + ------ + InvalidArgument + Bad image format passed to ``format`` or invalid ``size``. + + Returns + -------- + :class:`Asset` + The resulting CDN asset. + """ + return Asset._from_guild_image(self._state, self.id, self.banner, 'banners', format=format, size=size) + + @property + def splash_url(self): + """:class:`Asset`: Returns the guild's invite splash asset.""" + return self.splash_url_as() + + def splash_url_as(self, *, format='webp', size=2048): + """Returns an :class:`Asset` for the guild's invite splash. + + The format must be one of 'webp', 'jpeg', 'jpg', or 'png'. The + size must be a power of 2 between 16 and 4096. + + Parameters + ----------- + format: :class:`str` + The format to attempt to convert the splash to. + size: :class:`int` + The size of the image to display. + + Raises + ------ + InvalidArgument + Bad image format passed to ``format`` or invalid ``size``. + + Returns + -------- + :class:`Asset` + The resulting CDN asset. + """ + return Asset._from_guild_image(self._state, self.id, self.splash, 'splashes', format=format, size=size) + + @property + def member_count(self): + """Returns the true member count regardless of it being loaded fully or not.""" + return self._member_count + + @property + def chunked(self): + """Returns a boolean indicating if the guild is "chunked". + + A chunked guild means that :attr:`member_count` is equal to the + number of members stored in the internal :attr:`members` cache. + + If this value returns ``False``, then you should request for + offline members. + """ + count = getattr(self, '_member_count', None) + if count is None: + return False + return count == len(self._members) + + @property + def shard_id(self): + """:class:`int`: Returns the shard ID for this guild if applicable.""" + count = self._state.shard_count + if count is None: + return None + return (self.id >> 22) % count + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the guild's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def get_member_named(self, name): + """Returns the first member found that matches the name provided. + + The name can have an optional discriminator argument, e.g. "Jake#0001" + or "Jake" will both do the lookup. However the former will give a more + precise result. Note that the discriminator must have all 4 digits + for this to work. + + If a nickname is passed, then it is looked up via the nickname. Note + however, that a nickname + discriminator combo will not lookup the nickname + but rather the username + discriminator combo due to nickname + discriminator + not being unique. + + If no member is found, ``None`` is returned. + + Parameters + ----------- + name: :class:`str` + The name of the member to lookup with an optional discriminator. + + Returns + -------- + :class:`Member` + The member in this guild with the associated name. If not found + then ``None`` is returned. 
+ """ + + result = None + members = self.members + if len(name) > 5 and name[-5] == '#': + # The 5 length is checking to see if #0000 is in the string, + # as a#0000 has a length of 6, the minimum for a potential + # discriminator lookup. + potential_discriminator = name[-4:] + + # do the actual lookup and return if found + # if it isn't found then we'll do a full name lookup below. + result = utils.get(members, name=name[:-5], discriminator=potential_discriminator) + if result is not None: + return result + + def pred(m): + return m.nick == name or m.name == name + + return utils.find(pred, members) + + def _create_channel(self, name, overwrites, channel_type, category=None, **options): + if overwrites is None: + overwrites = {} + elif not isinstance(overwrites, dict): + raise InvalidArgument('overwrites parameter expects a dict.') + + perms = [] + for target, perm in overwrites.items(): + if not isinstance(perm, PermissionOverwrite): + raise InvalidArgument('Expected PermissionOverwrite received {0.__name__}'.format(type(perm))) + + allow, deny = perm.pair() + payload = { + 'allow': allow.value, + 'deny': deny.value, + 'id': target.id + } + + if isinstance(target, Role): + payload['type'] = 'role' + else: + payload['type'] = 'member' + + perms.append(payload) + + try: + options['rate_limit_per_user'] = options.pop('slowmode_delay') + except KeyError: + pass + + parent_id = category.id if category else None + return self._state.http.create_channel(self.id, channel_type.value, name=name, parent_id=parent_id, + permission_overwrites=perms, **options) + + async def create_text_channel(self, name, *, overwrites=None, category=None, reason=None, **options): + """|coro| + + Creates a :class:`TextChannel` for the guild. + + Note that you need the :attr:`~Permissions.manage_channels` permission + to create the channel. + + The ``overwrites`` parameter can be used to create a 'secret' + channel upon creation. This parameter expects a :class:`dict` of + overwrites with the target (either a :class:`Member` or a :class:`Role`) + as the key and a :class:`PermissionOverwrite` as the value. + + .. note:: + + Creating a channel of a specified position will not update the position of + other channels to follow suit. A follow-up call to :meth:`~TextChannel.edit` + will be required to update the position of the channel in the channel list. + + Examples + ---------- + + Creating a basic channel: + + .. code-block:: python3 + + channel = await guild.create_text_channel('cool-channel') + + Creating a "secret" channel: + + .. code-block:: python3 + + overwrites = { + guild.default_role: discord.PermissionOverwrite(read_messages=False), + guild.me: discord.PermissionOverwrite(read_messages=True) + } + + channel = await guild.create_text_channel('secret', overwrites=overwrites) + + Parameters + ----------- + name: :class:`str` + The channel's name. + overwrites + A :class:`dict` of target (either a role or a member) to + :class:`PermissionOverwrite` to apply upon creation of a channel. + Useful for creating secret channels. + category: Optional[:class:`CategoryChannel`] + The category to place the newly created channel under. + The permissions will be automatically synced to category if no + overwrites are provided. + position: :class:`int` + The position in the channel list. This is a number that starts + at 0. e.g. the top channel is position 0. + topic: Optional[:class:`str`] + The new channel's topic. + slowmode_delay: :class:`int` + Specifies the slowmode rate limit for user in this channel, in seconds. 
+ The maximum value possible is `21600`. + nsfw: :class:`bool` + To mark the channel as NSFW or not. + reason: Optional[:class:`str`] + The reason for creating this channel. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have the proper permissions to create this channel. + HTTPException + Creating the channel failed. + InvalidArgument + The permission overwrite information is not in proper form. + + Returns + ------- + :class:`TextChannel` + The channel that was just created. + """ + data = await self._create_channel(name, overwrites, ChannelType.text, category, reason=reason, **options) + channel = TextChannel(state=self._state, guild=self, data=data) + + # temporarily add to the cache + self._channels[channel.id] = channel + return channel + + async def create_voice_channel(self, name, *, overwrites=None, category=None, reason=None, **options): + """|coro| + + This is similar to :meth:`create_text_channel` except makes a :class:`VoiceChannel` instead, in addition + to having the following new parameters. + + Parameters + ----------- + bitrate: :class:`int` + The channel's preferred audio bitrate in bits per second. + user_limit: :class:`int` + The channel's limit for number of members that can be in a voice channel. + """ + data = await self._create_channel(name, overwrites, ChannelType.voice, category, reason=reason, **options) + channel = VoiceChannel(state=self._state, guild=self, data=data) + + # temporarily add to the cache + self._channels[channel.id] = channel + return channel + + async def create_category(self, name, *, overwrites=None, reason=None): + """|coro| + + Same as :meth:`create_text_channel` except makes a :class:`CategoryChannel` instead. + + .. note:: + + The ``category`` parameter is not supported in this function since categories + cannot have categories. + """ + data = await self._create_channel(name, overwrites, ChannelType.category, reason=reason) + channel = CategoryChannel(state=self._state, guild=self, data=data) + + # temporarily add to the cache + self._channels[channel.id] = channel + return channel + + create_category_channel = create_category + + async def leave(self): + """|coro| + + Leaves the guild. + + .. note:: + + You cannot leave the guild that you own, you must delete it instead + via :meth:`delete`. + + Raises + -------- + HTTPException + Leaving the guild failed. + """ + await self._state.http.leave_guild(self.id) + + async def delete(self): + """|coro| + + Deletes the guild. You must be the guild owner to delete the + guild. + + Raises + -------- + HTTPException + Deleting the guild failed. + Forbidden + You do not have permissions to delete the guild. + """ + + await self._state.http.delete_guild(self.id) + + async def edit(self, *, reason=None, **fields): + """|coro| + + Edits the guild. + + You must have the :attr:`~Permissions.manage_guild` permission + to edit the guild. + + Parameters + ---------- + name: :class:`str` + The new name of the guild. + description: :class:`str` + The new description of the guild. This is only available to guilds that + contain `VERIFIED` in :attr:`Guild.features`. + icon: :class:`bytes` + A :term:`py:bytes-like object` representing the icon. Only PNG/JPEG supported. + Could be ``None`` to denote removal of the icon. + banner: :class:`bytes` + A :term:`py:bytes-like object` representing the banner. + Could be ``None`` to denote removal of the banner. + splash: :class:`bytes` + A :term:`py:bytes-like object` representing the invite splash. + Only PNG/JPEG supported. 
Could be ``None`` to denote removing the + splash. Only available for partnered guilds with ``INVITE_SPLASH`` + feature. + region: :class:`VoiceRegion` + The new region for the guild's voice communication. + afk_channel: Optional[:class:`VoiceChannel`] + The new channel that is the AFK channel. Could be ``None`` for no AFK channel. + afk_timeout: :class:`int` + The number of seconds until someone is moved to the AFK channel. + owner: :class:`Member` + The new owner of the guild to transfer ownership to. Note that you must + be owner of the guild to do this. + verification_level: :class:`VerificationLevel` + The new verification level for the guild. + default_notifications: :class:`NotificationLevel` + The new default notification level for the guild. + explicit_content_filter: :class:`ContentFilter` + The new explicit content filter for the guild. + vanity_code: :class:`str` + The new vanity code for the guild. + system_channel: Optional[:class:`TextChannel`] + The new channel that is used for the system channel. Could be ``None`` for no system channel. + system_channel_flags: :class:`SystemChannelFlags` + The new system channel settings to use with the new system channel. + reason: Optional[:class:`str`] + The reason for editing this guild. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have permissions to edit the guild. + HTTPException + Editing the guild failed. + InvalidArgument + The image format passed in to ``icon`` is invalid. It must be + PNG or JPG. This is also raised if you are not the owner of the + guild and request an ownership transfer. + """ + + http = self._state.http + try: + icon_bytes = fields['icon'] + except KeyError: + icon = self.icon + else: + if icon_bytes is not None: + icon = utils._bytes_to_base64_data(icon_bytes) + else: + icon = None + + try: + banner_bytes = fields['banner'] + except KeyError: + banner = self.banner + else: + if banner_bytes is not None: + banner = utils._bytes_to_base64_data(banner_bytes) + else: + banner = None + + try: + vanity_code = fields['vanity_code'] + except KeyError: + pass + else: + await http.change_vanity_code(self.id, vanity_code, reason=reason) + + try: + splash_bytes = fields['splash'] + except KeyError: + splash = self.splash + else: + if splash_bytes is not None: + splash = utils._bytes_to_base64_data(splash_bytes) + else: + splash = None + + fields['icon'] = icon + fields['banner'] = banner + fields['splash'] = splash + + try: + default_message_notifications = int(fields.pop('default_notifications')) + except (TypeError, KeyError): + pass + else: + fields['default_message_notifications'] = default_message_notifications + + try: + afk_channel = fields.pop('afk_channel') + except KeyError: + pass + else: + if afk_channel is None: + fields['afk_channel_id'] = afk_channel + else: + fields['afk_channel_id'] = afk_channel.id + + try: + system_channel = fields.pop('system_channel') + except KeyError: + pass + else: + if system_channel is None: + fields['system_channel_id'] = system_channel + else: + fields['system_channel_id'] = system_channel.id + + if 'owner' in fields: + if self.owner != self.me: + raise InvalidArgument('To transfer ownership you must be the owner of the guild.') + + fields['owner_id'] = fields['owner'].id + + if 'region' in fields: + fields['region'] = str(fields['region']) + + level = fields.get('verification_level', self.verification_level) + if not isinstance(level, VerificationLevel): + raise InvalidArgument('verification_level field must be of type VerificationLevel') + + 
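+        # (editorial note) each enum-like setting is validated and then serialized to its raw integer value for the API payload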
fields['verification_level'] = level.value + + explicit_content_filter = fields.get('explicit_content_filter', self.explicit_content_filter) + if not isinstance(explicit_content_filter, ContentFilter): + raise InvalidArgument('explicit_content_filter field must be of type ContentFilter') + + fields['explicit_content_filter'] = explicit_content_filter.value + + system_channel_flags = fields.get('system_channel_flags', self.system_channel_flags) + if not isinstance(system_channel_flags, SystemChannelFlags): + raise InvalidArgument('system_channel_flags field must be of type SystemChannelFlags') + + fields['system_channel_flags'] = system_channel_flags.value + await http.edit_guild(self.id, reason=reason, **fields) + + async def fetch_channels(self): + """|coro| + + Retrieves all :class:`abc.GuildChannel` that the guild has. + + .. note:: + + This method is an API call. For general usage, consider :attr:`channels` instead. + + .. versionadded:: 1.2.0 + + Raises + ------- + InvalidData + An unknown channel type was received from Discord. + HTTPException + Retrieving the channels failed. + + Returns + ------- + List[:class:`abc.GuildChannel`] + All channels in the guild. + """ + data = await self._state.http.get_all_guild_channels(self.id) + + def convert(d): + factory, ch_type = _channel_factory(d['type']) + if factory is None: + raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data)) + + channel = factory(guild=self, state=self._state, data=d) + return channel + + return [convert(d) for d in data] + + async def fetch_member(self, member_id): + """|coro| + + Retreives a :class:`Member` from a guild ID, and a member ID. + + .. note:: + + This method is an API call. For general usage, consider :meth:`get_member` instead. + + Parameters + ----------- + member_id: :class:`int` + The member's ID to fetch from. + + Raises + ------- + Forbidden + You do not have access to the guild. + HTTPException + Getting the guild failed. + + Returns + -------- + :class:`Member` + The member from the member ID. + """ + data = await self._state.http.get_member(self.id, member_id) + return Member(data=data, state=self._state, guild=self) + + async def fetch_ban(self, user): + """|coro| + + Retrieves the :class:`BanEntry` for a user, which is a namedtuple + with a ``user`` and ``reason`` field. See :meth:`bans` for more + information. + + You must have the :attr:`~Permissions.ban_members` permission + to get this information. + + Parameters + ----------- + user: :class:`abc.Snowflake` + The user to get ban information from. + + Raises + ------ + Forbidden + You do not have proper permissions to get the information. + NotFound + This user is not banned. + HTTPException + An error occurred while fetching the information. + + Returns + ------- + BanEntry + The BanEntry object for the specified user. + """ + data = await self._state.http.get_ban(user.id, self.id) + return BanEntry( + user=User(state=self._state, data=data['user']), + reason=data['reason'] + ) + + async def bans(self): + """|coro| + + Retrieves all the users that are banned from the guild. + + This coroutine returns a :class:`list` of BanEntry objects, which is a + namedtuple with a ``user`` field to denote the :class:`User` + that got banned along with a ``reason`` field specifying + why the user was banned that could be set to ``None``. + + You must have the :attr:`~Permissions.ban_members` permission + to get this information. + + Raises + ------- + Forbidden + You do not have proper permissions to get the information. 
+ HTTPException + An error occurred while fetching the information. + + Returns + -------- + List[BanEntry] + A list of BanEntry objects. + """ + + data = await self._state.http.get_bans(self.id) + return [BanEntry(user=User(state=self._state, data=e['user']), + reason=e['reason']) + for e in data] + + async def prune_members(self, *, days, compute_prune_count=True, reason=None): + r"""|coro| + + Prunes the guild from its inactive members. + + The inactive members are denoted if they have not logged on in + ``days`` number of days and they have no roles. + + You must have the :attr:`~Permissions.kick_members` permission + to use this. + + To check how many members you would prune without actually pruning, + see the :meth:`estimate_pruned_members` function. + + Parameters + ----------- + days: :class:`int` + The number of days before counting as inactive. + reason: Optional[:class:`str`] + The reason for doing this action. Shows up on the audit log. + compute_prune_count: :class:`bool` + Whether to compute the prune count. This defaults to ``True`` + which makes it prone to timeouts in very large guilds. In order + to prevent timeouts, you must set this to ``False``. If this is + set to ``False``\, then this function will always return ``None``. + + Raises + ------- + Forbidden + You do not have permissions to prune members. + HTTPException + An error occurred while pruning members. + InvalidArgument + An integer was not passed for ``days``. + + Returns + --------- + Optional[:class:`int`] + The number of members pruned. If ``compute_prune_count`` is ``False`` + then this returns ``None``. + """ + + if not isinstance(days, int): + raise InvalidArgument('Expected int for ``days``, received {0.__class__.__name__} instead.'.format(days)) + + data = await self._state.http.prune_members(self.id, days, compute_prune_count=compute_prune_count, reason=reason) + return data['pruned'] + + async def webhooks(self): + """|coro| + + Gets the list of webhooks from this guild. + + Requires :attr:`~.Permissions.manage_webhooks` permissions. + + Raises + ------- + Forbidden + You don't have permissions to get the webhooks. + + Returns + -------- + List[:class:`Webhook`] + The webhooks for this guild. + """ + + data = await self._state.http.guild_webhooks(self.id) + return [Webhook.from_state(d, state=self._state) for d in data] + + async def estimate_pruned_members(self, *, days): + """|coro| + + Similar to :meth:`prune_members` except instead of actually + pruning members, it returns how many members it would prune + from the guild had it been called. + + Parameters + ----------- + days: :class:`int` + The number of days before counting as inactive. + + Raises + ------- + Forbidden + You do not have permissions to prune members. + HTTPException + An error occurred while fetching the prune members estimate. + InvalidArgument + An integer was not passed for ``days``. + + Returns + --------- + :class:`int` + The number of members estimated to be pruned. + """ + + if not isinstance(days, int): + raise InvalidArgument('Expected int for ``days``, received {0.__class__.__name__} instead.'.format(days)) + + data = await self._state.http.estimate_pruned_members(self.id, days) + return data['pruned'] + + async def invites(self): + """|coro| + + Returns a list of all active instant invites from the guild. + + You must have the :attr:`~Permissions.manage_guild` permission to get + this information. + + Raises + ------- + Forbidden + You do not have proper permissions to get the information. 
+ HTTPException + An error occurred while fetching the information. + + Returns + ------- + List[:class:`Invite`] + The list of invites that are currently active. + """ + + data = await self._state.http.invites_from(self.id) + result = [] + for invite in data: + channel = self.get_channel(int(invite['channel']['id'])) + invite['channel'] = channel + invite['guild'] = self + result.append(Invite(state=self._state, data=invite)) + + return result + + async def fetch_emojis(self): + r"""|coro| + + Retrieves all custom :class:`Emoji`\s from the guild. + + .. note:: + + This method is an API call. For general usage, consider :attr:`emojis` instead. + + Raises + --------- + HTTPException + An error occurred fetching the emojis. + + Returns + -------- + List[:class:`Emoji`] + The retrieved emojis. + """ + data = await self._state.http.get_all_custom_emojis(self.id) + return [Emoji(guild=self, state=self._state, data=d) for d in data] + + async def fetch_emoji(self, emoji_id): + """|coro| + + Retrieves a custom :class:`Emoji` from the guild. + + .. note:: + + This method is an API call. + For general usage, consider iterating over :attr:`emojis` instead. + + Parameters + ------------- + emoji_id: :class:`int` + The emoji's ID. + + Raises + --------- + NotFound + The emoji requested could not be found. + HTTPException + An error occurred fetching the emoji. + + Returns + -------- + :class:`Emoji` + The retrieved emoji. + """ + data = await self._state.http.get_custom_emoji(self.id, emoji_id) + return Emoji(guild=self, state=self._state, data=data) + + async def create_custom_emoji(self, *, name, image, roles=None, reason=None): + r"""|coro| + + Creates a custom :class:`Emoji` for the guild. + + There is currently a limit of 50 static and animated emojis respectively per guild, + unless the guild has the ``MORE_EMOJI`` feature which extends the limit to 200. + + You must have the :attr:`~Permissions.manage_emojis` permission to + do this. + + Parameters + ----------- + name: :class:`str` + The emoji name. Must be at least 2 characters. + image: :class:`bytes` + The :term:`py:bytes-like object` representing the image data to use. + Only JPG, PNG and GIF images are supported. + roles: Optional[List[:class:`Role`]] + A :class:`list` of :class:`Role`\s that can use this emoji. Leave empty to make it available to everyone. + reason: Optional[:class:`str`] + The reason for creating this emoji. Shows up on the audit log. + + Raises + ------- + Forbidden + You are not allowed to create emojis. + HTTPException + An error occurred creating an emoji. + + Returns + -------- + :class:`Emoji` + The created emoji. + """ + + img = utils._bytes_to_base64_data(image) + if roles: + roles = [role.id for role in roles] + data = await self._state.http.create_custom_emoji(self.id, name, img, roles=roles, reason=reason) + return self._state.store_emoji(self, data) + + async def create_role(self, *, reason=None, **fields): + """|coro| + + Creates a :class:`Role` for the guild. + + All fields are optional. + + You must have the :attr:`~Permissions.manage_roles` permission to + do this. + + Parameters + ----------- + name: :class:`str` + The role name. Defaults to 'new role'. + permissions: :class:`Permissions` + The permissions to have. Defaults to no permissions. + colour: :class:`Colour` + The colour for the role. Defaults to :meth:`Colour.default`. + This is aliased to ``color`` as well. + hoist: :class:`bool` + Indicates if the role should be shown separately in the member list. + Defaults to ``False``. 
+ mentionable: :class:`bool` + Indicates if the role should be mentionable by others. + Defaults to ``False``. + reason: Optional[:class:`str`] + The reason for creating this role. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have permissions to create the role. + HTTPException + Creating the role failed. + InvalidArgument + An invalid keyword argument was given. + + Returns + -------- + :class:`Role` + The newly created role. + """ + + try: + perms = fields.pop('permissions') + except KeyError: + fields['permissions'] = 0 + else: + fields['permissions'] = perms.value + + try: + colour = fields.pop('colour') + except KeyError: + colour = fields.get('color', Colour.default()) + finally: + fields['color'] = colour.value + + valid_keys = ('name', 'permissions', 'color', 'hoist', 'mentionable') + for key in fields: + if key not in valid_keys: + raise InvalidArgument('%r is not a valid field.' % key) + + data = await self._state.http.create_role(self.id, reason=reason, **fields) + role = Role(guild=self, data=data, state=self._state) + + # TODO: add to cache + return role + + async def kick(self, user, *, reason=None): + """|coro| + + Kicks a user from the guild. + + The user must meet the :class:`abc.Snowflake` abc. + + You must have the :attr:`~Permissions.kick_members` permission to + do this. + + Parameters + ----------- + user: :class:`abc.Snowflake` + The user to kick from their guild. + reason: Optional[:class:`str`] + The reason the user got kicked. + + Raises + ------- + Forbidden + You do not have the proper permissions to kick. + HTTPException + Kicking failed. + """ + await self._state.http.kick(user.id, self.id, reason=reason) + + async def ban(self, user, *, reason=None, delete_message_days=1): + """|coro| + + Bans a user from the guild. + + The user must meet the :class:`abc.Snowflake` abc. + + You must have the :attr:`~Permissions.ban_members` permission to + do this. + + Parameters + ----------- + user: :class:`abc.Snowflake` + The user to ban from their guild. + delete_message_days: :class:`int` + The number of days worth of messages to delete from the user + in the guild. The minimum is 0 and the maximum is 7. + reason: Optional[:class:`str`] + The reason the user got banned. + + Raises + ------- + Forbidden + You do not have the proper permissions to ban. + HTTPException + Banning failed. + """ + await self._state.http.ban(user.id, self.id, delete_message_days, reason=reason) + + async def unban(self, user, *, reason=None): + """|coro| + + Unbans a user from the guild. + + The user must meet the :class:`abc.Snowflake` abc. + + You must have the :attr:`~Permissions.ban_members` permission to + do this. + + Parameters + ----------- + user: :class:`abc.Snowflake` + The user to unban. + reason: Optional[:class:`str`] + The reason for doing this action. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have the proper permissions to unban. + HTTPException + Unbanning failed. + """ + await self._state.http.unban(user.id, self.id, reason=reason) + + async def vanity_invite(self): + """|coro| + + Returns the guild's special vanity invite. + + The guild must be partnered, i.e. have 'VANITY_URL' in + :attr:`~Guild.features`. + + You must have the :attr:`~Permissions.manage_guild` permission to use + this as well. + + Raises + ------- + Forbidden + You do not have the proper permissions to get this. + HTTPException + Retrieving the vanity invite failed. + + Returns + -------- + :class:`Invite` + The special vanity invite. 
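(Editorial usage note for the moderation coroutines documented above — ``kick``, ``ban`` and ``unban``.) A common pattern built from them is a "softban": ban to purge a user's recent messages, then immediately unban. A minimal sketch, assuming ``guild`` and ``member`` objects are already in hand: ::

    async def softban(guild, member, *, days: int = 7):
        # the ban deletes up to `days` days of messages (the API maximum is 7),
        # then the unban lifts it so only the message purge remains
        await guild.ban(member, reason='softban: purge messages', delete_message_days=days)
        await guild.unban(member, reason='softban: lift ban')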
+ """ + + # we start with { code: abc } + payload = await self._state.http.get_vanity_code(self.id) + + # get the vanity URL channel since default channels aren't + # reliable or a thing anymore + data = await self._state.http.get_invite(payload['code']) + + payload['guild'] = self + payload['channel'] = self.get_channel(int(data['channel']['id'])) + payload['revoked'] = False + payload['temporary'] = False + payload['max_uses'] = 0 + payload['max_age'] = 0 + return Invite(state=self._state, data=payload) + + def ack(self): + """|coro| + + Marks every message in this guild as read. + + The user must not be a bot user. + + Raises + ------- + HTTPException + Acking failed. + ClientException + You must not be a bot user. + """ + + state = self._state + if state.is_bot: + raise ClientException('Must not be a bot account to ack messages.') + return state.http.ack_guild(self.id) + + def audit_logs(self, *, limit=100, before=None, after=None, oldest_first=None, user=None, action=None): + """Returns an :class:`AsyncIterator` that enables receiving the guild's audit logs. + + You must have the :attr:`~Permissions.view_audit_log` permission to use this. + + Examples + ---------- + + Getting the first 100 entries: :: + + async for entry in guild.audit_logs(limit=100): + print('{0.user} did {0.action} to {0.target}'.format(entry)) + + Getting entries for a specific action: :: + + async for entry in guild.audit_logs(action=discord.AuditLogAction.ban): + print('{0.user} banned {0.target}'.format(entry)) + + Getting entries made by a specific user: :: + + entries = await guild.audit_logs(limit=None, user=guild.me).flatten() + await channel.send('I made {} moderation actions.'.format(len(entries))) + + Parameters + ----------- + limit: Optional[:class:`int`] + The number of entries to retrieve. If ``None`` retrieve all entries. + before: Union[:class:`abc.Snowflake`, :class:`datetime.datetime`] + Retrieve entries before this date or entry. + If a date is provided it must be a timezone-naive datetime representing UTC time. + after: Union[:class:`abc.Snowflake`, :class:`datetime.datetime`] + Retrieve entries after this date or entry. + If a date is provided it must be a timezone-naive datetime representing UTC time. + oldest_first: :class:`bool` + If set to ``True``, return entries in oldest->newest order. Defaults to True if + ``after`` is specified, otherwise ``False``. + user: :class:`abc.Snowflake` + The moderator to filter entries from. + action: :class:`AuditLogAction` + The action to filter with. + + Raises + ------- + Forbidden + You are not allowed to fetch audit logs + HTTPException + An error occurred while fetching the audit logs. + + Yields + -------- + :class:`AuditLogEntry` + The audit log entry. + """ + if user: + user = user.id + + if action: + action = action.value + + return AuditLogIterator(self, before=before, after=after, limit=limit, + oldest_first=oldest_first, user_id=user, action_type=action) + + async def widget(self): + """|coro| + + Returns the widget of the guild. + + .. note:: + + The guild must have the widget enabled to get this information. + + Raises + ------- + Forbidden + The widget for this guild is disabled. + HTTPException + Retrieving the widget failed. + + Returns + -------- + :class:`Widget` + The guild's widget. 
+ """ + data = await self._state.http.get_widget(self.id) + + return Widget(state=self._state, data=data) diff --git a/venv/lib/python3.7/site-packages/discord/http.py b/venv/lib/python3.7/site-packages/discord/http.py new file mode 100644 index 0000000..16b6571 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/http.py @@ -0,0 +1,834 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import json +import logging +import sys +from urllib.parse import quote as _uriquote +import weakref + +import aiohttp + +from .errors import HTTPException, Forbidden, NotFound, LoginFailure, GatewayNotFound +from . import __version__, utils + +log = logging.getLogger(__name__) + +async def json_or_text(response): + text = await response.text(encoding='utf-8') + if response.headers['content-type'] == 'application/json': + return json.loads(text) + return text + +class Route: + BASE = 'https://discordapp.com/api/v7' + + def __init__(self, method, path, **parameters): + self.path = path + self.method = method + url = (self.BASE + self.path) + if parameters: + self.url = url.format(**{k: _uriquote(v) if isinstance(v, str) else v for k, v in parameters.items()}) + else: + self.url = url + + # major parameters: + self.channel_id = parameters.get('channel_id') + self.guild_id = parameters.get('guild_id') + + @property + def bucket(self): + # the bucket is just method + path w/ major parameters + return '{0.method}:{0.channel_id}:{0.guild_id}:{0.path}'.format(self) + +class MaybeUnlock: + def __init__(self, lock): + self.lock = lock + self._unlock = True + + def __enter__(self): + return self + + def defer(self): + self._unlock = False + + def __exit__(self, type, value, traceback): + if self._unlock: + self.lock.release() + +class HTTPClient: + """Represents an HTTP client sending HTTP requests to the Discord API.""" + + SUCCESS_LOG = '{method} {url} has received {text}' + REQUEST_LOG = '{method} {url} with {json} has returned {status}' + + def __init__(self, connector=None, *, proxy=None, proxy_auth=None, loop=None): + self.loop = asyncio.get_event_loop() if loop is None else loop + self.connector = connector + self.__session = None # filled in static_login + self._locks = weakref.WeakValueDictionary() + self._global_over = asyncio.Event(loop=self.loop) + self._global_over.set() + self.token = None + self.bot_token = False + self.proxy = proxy + self.proxy_auth = proxy_auth + + user_agent = 'DiscordBot 
(https://github.com/Rapptz/discord.py {0}) Python/{1[0]}.{1[1]} aiohttp/{2}' + self.user_agent = user_agent.format(__version__, sys.version_info, aiohttp.__version__) + + def recreate(self): + if self.__session.closed: + self.__session = aiohttp.ClientSession(connector=self.connector, loop=self.loop) + + async def request(self, route, *, files=None, header_bypass_delay=None, **kwargs): + bucket = route.bucket + method = route.method + url = route.url + + lock = self._locks.get(bucket) + if lock is None: + lock = asyncio.Lock(loop=self.loop) + if bucket is not None: + self._locks[bucket] = lock + + # header creation + headers = { + 'User-Agent': self.user_agent, + } + + if self.token is not None: + headers['Authorization'] = 'Bot ' + self.token if self.bot_token else self.token + # some checking if it's a JSON request + if 'json' in kwargs: + headers['Content-Type'] = 'application/json' + kwargs['data'] = utils.to_json(kwargs.pop('json')) + + try: + reason = kwargs.pop('reason') + except KeyError: + pass + else: + if reason: + headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ') + + kwargs['headers'] = headers + + # Proxy support + if self.proxy is not None: + kwargs['proxy'] = self.proxy + if self.proxy_auth is not None: + kwargs['proxy_auth'] = self.proxy_auth + + if not self._global_over.is_set(): + # wait until the global lock is complete + await self._global_over.wait() + + await lock.acquire() + with MaybeUnlock(lock) as maybe_lock: + for tries in range(5): + if files: + for f in files: + f.reset(seek=tries) + + async with self.__session.request(method, url, **kwargs) as r: + log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status) + + # even errors have text involved in them so this is safe to call + data = await json_or_text(r) + + # check if we have rate limit header information + remaining = r.headers.get('X-Ratelimit-Remaining') + if remaining == '0' and r.status != 429: + # we've depleted our current bucket + if header_bypass_delay is None: + delta = utils._parse_ratelimit_header(r) + else: + delta = header_bypass_delay + + log.debug('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta) + maybe_lock.defer() + self.loop.call_later(delta, lock.release) + + # the request was successful so just return the text/json + if 300 > r.status >= 200: + log.debug('%s %s has received %s', method, url, data) + return data + + # we are being rate limited + if r.status == 429: + if not isinstance(data, dict): + # Banned by Cloudflare more than likely. + raise HTTPException(r, data) + + fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"' + + # sleep a bit + retry_after = data['retry_after'] / 1000.0 + log.warning(fmt, retry_after, bucket) + + # check if it's a global rate limit + is_global = data.get('global', False) + if is_global: + log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after) + self._global_over.clear() + + await asyncio.sleep(retry_after, loop=self.loop) + log.debug('Done sleeping for the rate limit. 
Retrying...') + + # release the global lock now that the + # global rate limit has passed + if is_global: + self._global_over.set() + log.debug('Global rate limit is now over.') + + continue + + # we've received a 500 or 502, unconditional retry + if r.status in {500, 502}: + await asyncio.sleep(1 + tries * 2, loop=self.loop) + continue + + # the usual error cases + if r.status == 403: + raise Forbidden(r, data) + elif r.status == 404: + raise NotFound(r, data) + else: + raise HTTPException(r, data) + + # We've run out of retries, raise. + raise HTTPException(r, data) + + async def get_from_cdn(self, url): + async with self.__session.get(url) as resp: + if resp.status == 200: + return await resp.read() + elif resp.status == 404: + raise NotFound(resp, 'asset not found') + elif resp.status == 403: + raise Forbidden(resp, 'cannot retrieve asset') + else: + raise HTTPException(resp, 'failed to get asset') + + # state management + + async def close(self): + if self.__session: + await self.__session.close() + + def _token(self, token, *, bot=True): + self.token = token + self.bot_token = bot + self._ack_token = None + + # login management + + async def static_login(self, token, *, bot): + # Necessary to get aiohttp to stop complaining about session creation + self.__session = aiohttp.ClientSession(connector=self.connector, loop=self.loop) + old_token, old_bot = self.token, self.bot_token + self._token(token, bot=bot) + + try: + data = await self.request(Route('GET', '/users/@me')) + except HTTPException as exc: + self._token(old_token, bot=old_bot) + if exc.response.status == 401: + raise LoginFailure('Improper token has been passed.') from exc + raise + + return data + + def logout(self): + return self.request(Route('POST', '/auth/logout')) + + # Group functionality + + def start_group(self, user_id, recipients): + payload = { + 'recipients': recipients + } + + return self.request(Route('POST', '/users/{user_id}/channels', user_id=user_id), json=payload) + + def leave_group(self, channel_id): + return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id)) + + def add_group_recipient(self, channel_id, user_id): + r = Route('PUT', '/channels/{channel_id}/recipients/{user_id}', channel_id=channel_id, user_id=user_id) + return self.request(r) + + def remove_group_recipient(self, channel_id, user_id): + r = Route('DELETE', '/channels/{channel_id}/recipients/{user_id}', channel_id=channel_id, user_id=user_id) + return self.request(r) + + def edit_group(self, channel_id, **options): + valid_keys = ('name', 'icon') + payload = { + k: v for k, v in options.items() if k in valid_keys + } + + return self.request(Route('PATCH', '/channels/{channel_id}', channel_id=channel_id), json=payload) + + def convert_group(self, channel_id): + return self.request(Route('POST', '/channels/{channel_id}/convert', channel_id=channel_id)) + + # Message management + + def start_private_message(self, user_id): + payload = { + 'recipient_id': user_id + } + + return self.request(Route('POST', '/users/@me/channels'), json=payload) + + def send_message(self, channel_id, content, *, tts=False, embed=None, nonce=None): + r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id) + payload = {} + + if content: + payload['content'] = content + + if tts: + payload['tts'] = True + + if embed: + payload['embed'] = embed + + if nonce: + payload['nonce'] = nonce + + return self.request(r, json=payload) + + def send_typing(self, channel_id): + return self.request(Route('POST', 
'/channels/{channel_id}/typing', channel_id=channel_id)) + + def send_files(self, channel_id, *, files, content=None, tts=False, embed=None, nonce=None): + r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id) + form = aiohttp.FormData() + + payload = {'tts': tts} + if content: + payload['content'] = content + if embed: + payload['embed'] = embed + if nonce: + payload['nonce'] = nonce + + form.add_field('payload_json', utils.to_json(payload)) + if len(files) == 1: + file = files[0] + form.add_field('file', file.fp, filename=file.filename, content_type='application/octet-stream') + else: + for index, file in enumerate(files): + form.add_field('file%s' % index, file.fp, filename=file.filename, content_type='application/octet-stream') + + return self.request(r, data=form, files=files) + + async def ack_message(self, channel_id, message_id): + r = Route('POST', '/channels/{channel_id}/messages/{message_id}/ack', channel_id=channel_id, message_id=message_id) + data = await self.request(r, json={'token': self._ack_token}) + self._ack_token = data['token'] + + def ack_guild(self, guild_id): + return self.request(Route('POST', '/guilds/{guild_id}/ack', guild_id=guild_id)) + + def delete_message(self, channel_id, message_id, *, reason=None): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id) + return self.request(r, reason=reason) + + def delete_messages(self, channel_id, message_ids, *, reason=None): + r = Route('POST', '/channels/{channel_id}/messages/bulk_delete', channel_id=channel_id) + payload = { + 'messages': message_ids + } + + return self.request(r, json=payload, reason=reason) + + def edit_message(self, channel_id, message_id, **fields): + r = Route('PATCH', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id) + return self.request(r, json=fields) + + def add_reaction(self, channel_id, message_id, emoji): + r = Route('PUT', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me', + channel_id=channel_id, message_id=message_id, emoji=emoji) + return self.request(r, header_bypass_delay=0.25) + + def remove_reaction(self, channel_id, message_id, emoji, member_id): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}', + channel_id=channel_id, message_id=message_id, member_id=member_id, emoji=emoji) + return self.request(r, header_bypass_delay=0.25) + + def remove_own_reaction(self, channel_id, message_id, emoji): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me', + channel_id=channel_id, message_id=message_id, emoji=emoji) + return self.request(r, header_bypass_delay=0.25) + + def get_reaction_users(self, channel_id, message_id, emoji, limit, after=None): + r = Route('GET', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}', + channel_id=channel_id, message_id=message_id, emoji=emoji) + + params = {'limit': limit} + if after: + params['after'] = after + return self.request(r, params=params) + + def clear_reactions(self, channel_id, message_id): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions', + channel_id=channel_id, message_id=message_id) + + return self.request(r) + + def get_message(self, channel_id, message_id): + r = Route('GET', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id) + return self.request(r) + + def get_channel(self, channel_id): + r = Route('GET', 
'/channels/{channel_id}', channel_id=channel_id) + return self.request(r) + + def logs_from(self, channel_id, limit, before=None, after=None, around=None): + params = { + 'limit': limit + } + + if before is not None: + params['before'] = before + if after is not None: + params['after'] = after + if around is not None: + params['around'] = around + + return self.request(Route('GET', '/channels/{channel_id}/messages', channel_id=channel_id), params=params) + + def pin_message(self, channel_id, message_id): + return self.request(Route('PUT', '/channels/{channel_id}/pins/{message_id}', + channel_id=channel_id, message_id=message_id)) + + def unpin_message(self, channel_id, message_id): + return self.request(Route('DELETE', '/channels/{channel_id}/pins/{message_id}', + channel_id=channel_id, message_id=message_id)) + + def pins_from(self, channel_id): + return self.request(Route('GET', '/channels/{channel_id}/pins', channel_id=channel_id)) + + # Member management + + def kick(self, user_id, guild_id, reason=None): + r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + if reason: + # thanks aiohttp + r.url = '{0.url}?reason={1}'.format(r, _uriquote(reason)) + + return self.request(r) + + def ban(self, user_id, guild_id, delete_message_days=1, reason=None): + r = Route('PUT', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id) + params = { + 'delete-message-days': delete_message_days, + } + + if reason: + # thanks aiohttp + r.url = '{0.url}?reason={1}'.format(r, _uriquote(reason)) + + return self.request(r, params=params) + + def unban(self, user_id, guild_id, *, reason=None): + r = Route('DELETE', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id) + return self.request(r, reason=reason) + + def guild_voice_state(self, user_id, guild_id, *, mute=None, deafen=None, reason=None): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + payload = {} + if mute is not None: + payload['mute'] = mute + + if deafen is not None: + payload['deaf'] = deafen + + return self.request(r, json=payload, reason=reason) + + def edit_profile(self, password, username, avatar, **fields): + payload = { + 'password': password, + 'username': username, + 'avatar': avatar + } + + if 'email' in fields: + payload['email'] = fields['email'] + + if 'new_password' in fields: + payload['new_password'] = fields['new_password'] + + return self.request(Route('PATCH', '/users/@me'), json=payload) + + def change_my_nickname(self, guild_id, nickname, *, reason=None): + r = Route('PATCH', '/guilds/{guild_id}/members/@me/nick', guild_id=guild_id) + payload = { + 'nick': nickname + } + return self.request(r, json=payload, reason=reason) + + def change_nickname(self, guild_id, user_id, nickname, *, reason=None): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + payload = { + 'nick': nickname + } + return self.request(r, json=payload, reason=reason) + + def edit_member(self, guild_id, user_id, *, reason=None, **fields): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + return self.request(r, json=fields, reason=reason) + + # Channel management + + def edit_channel(self, channel_id, *, reason=None, **options): + r = Route('PATCH', '/channels/{channel_id}', channel_id=channel_id) + valid_keys = ('name', 'parent_id', 'topic', 'bitrate', 'nsfw', + 'user_limit', 'position', 'permission_overwrites', 'rate_limit_per_user') 
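+        # (editorial note) options are filtered against valid_keys below, so unknown keyword arguments are silently dropped from the PATCH payload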
+ payload = { + k: v for k, v in options.items() if k in valid_keys + } + + return self.request(r, reason=reason, json=payload) + + def bulk_channel_update(self, guild_id, data, *, reason=None): + r = Route('PATCH', '/guilds/{guild_id}/channels', guild_id=guild_id) + return self.request(r, json=data, reason=reason) + + def create_channel(self, guild_id, channel_type, *, reason=None, **options): + payload = { + 'type': channel_type + } + + valid_keys = ('name', 'parent_id', 'topic', 'bitrate', 'nsfw', + 'user_limit', 'position', 'permission_overwrites', 'rate_limit_per_user') + payload.update({ + k: v for k, v in options.items() if k in valid_keys and v is not None + }) + + return self.request(Route('POST', '/guilds/{guild_id}/channels', guild_id=guild_id), json=payload, reason=reason) + + def delete_channel(self, channel_id, *, reason=None): + return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id), reason=reason) + + # Webhook management + + def create_webhook(self, channel_id, *, name, avatar=None, reason=None): + payload = { + 'name': name + } + if avatar is not None: + payload['avatar'] = avatar + + r = Route('POST', '/channels/{channel_id}/webhooks', channel_id=channel_id) + return self.request(r, json=payload, reason=reason) + + def channel_webhooks(self, channel_id): + return self.request(Route('GET', '/channels/{channel_id}/webhooks', channel_id=channel_id)) + + def guild_webhooks(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/webhooks', guild_id=guild_id)) + + def get_webhook(self, webhook_id): + return self.request(Route('GET', '/webhooks/{webhook_id}', webhook_id=webhook_id)) + + # Guild management + + def get_guilds(self, limit, before=None, after=None): + params = { + 'limit': limit + } + + if before: + params['before'] = before + if after: + params['after'] = after + + return self.request(Route('GET', '/users/@me/guilds'), params=params) + + def leave_guild(self, guild_id): + return self.request(Route('DELETE', '/users/@me/guilds/{guild_id}', guild_id=guild_id)) + + def get_guild(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}', guild_id=guild_id)) + + def delete_guild(self, guild_id): + return self.request(Route('DELETE', '/guilds/{guild_id}', guild_id=guild_id)) + + def create_guild(self, name, region, icon): + payload = { + 'name': name, + 'icon': icon, + 'region': region + } + + return self.request(Route('POST', '/guilds'), json=payload) + + def edit_guild(self, guild_id, *, reason=None, **fields): + valid_keys = ('name', 'region', 'icon', 'afk_timeout', 'owner_id', + 'afk_channel_id', 'splash', 'verification_level', + 'system_channel_id', 'default_message_notifications', + 'description', 'explicit_content_filter', 'banner', + 'system_channel_flags') + + payload = { + k: v for k, v in fields.items() if k in valid_keys + } + + return self.request(Route('PATCH', '/guilds/{guild_id}', guild_id=guild_id), json=payload, reason=reason) + + def get_bans(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/bans', guild_id=guild_id)) + + def get_ban(self, user_id, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id)) + + def get_vanity_code(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/vanity-url', guild_id=guild_id)) + + def change_vanity_code(self, guild_id, code, *, reason=None): + payload = {'code': code} + return self.request(Route('PATCH', '/guilds/{guild_id}/vanity-url', 
guild_id=guild_id), json=payload, reason=reason) + + def get_all_guild_channels(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/channels', guild_id=guild_id)) + + def get_member(self, guild_id, member_id): + return self.request(Route('GET', '/guilds/{guild_id}/members/{member_id}', guild_id=guild_id, member_id=member_id)) + + def prune_members(self, guild_id, days, compute_prune_count, *, reason=None): + params = { + 'days': days, + 'compute_prune_count': 'true' if compute_prune_count else 'false' + } + return self.request(Route('POST', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params, reason=reason) + + def estimate_pruned_members(self, guild_id, days): + params = { + 'days': days + } + return self.request(Route('GET', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params) + + def get_all_custom_emojis(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/emojis', guild_id=guild_id)) + + def get_custom_emoji(self, guild_id, emoji_id): + return self.request(Route('GET', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)) + + def create_custom_emoji(self, guild_id, name, image, *, roles=None, reason=None): + payload = { + 'name': name, + 'image': image, + 'roles': roles or [] + } + + r = Route('POST', '/guilds/{guild_id}/emojis', guild_id=guild_id) + return self.request(r, json=payload, reason=reason) + + def delete_custom_emoji(self, guild_id, emoji_id, *, reason=None): + r = Route('DELETE', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id) + return self.request(r, reason=reason) + + def edit_custom_emoji(self, guild_id, emoji_id, *, name, roles=None, reason=None): + payload = { + 'name': name, + 'roles': roles or [] + } + r = Route('PATCH', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id) + return self.request(r, json=payload, reason=reason) + + def get_audit_logs(self, guild_id, limit=100, before=None, after=None, user_id=None, action_type=None): + params = {'limit': limit} + if before: + params['before'] = before + if after: + params['after'] = after + if user_id: + params['user_id'] = user_id + if action_type: + params['action_type'] = action_type + + r = Route('GET', '/guilds/{guild_id}/audit-logs', guild_id=guild_id) + return self.request(r, params=params) + + def get_widget(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/widget.json', guild_id=guild_id)) + + # Invite management + + def create_invite(self, channel_id, *, reason=None, **options): + r = Route('POST', '/channels/{channel_id}/invites', channel_id=channel_id) + payload = { + 'max_age': options.get('max_age', 0), + 'max_uses': options.get('max_uses', 0), + 'temporary': options.get('temporary', False), + 'unique': options.get('unique', True) + } + + return self.request(r, reason=reason, json=payload) + + def get_invite(self, invite_id, *, with_counts=True): + params = { + 'with_counts': int(with_counts) + } + return self.request(Route('GET', '/invite/{invite_id}', invite_id=invite_id), params=params) + + def invites_from(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/invites', guild_id=guild_id)) + + def invites_from_channel(self, channel_id): + return self.request(Route('GET', '/channels/{channel_id}/invites', channel_id=channel_id)) + + def delete_invite(self, invite_id, *, reason=None): + return self.request(Route('DELETE', '/invite/{invite_id}', invite_id=invite_id), reason=reason) + + # Role management + + def edit_role(self, 
guild_id, role_id, *, reason=None, **fields): + r = Route('PATCH', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id) + valid_keys = ('name', 'permissions', 'color', 'hoist', 'mentionable') + payload = { + k: v for k, v in fields.items() if k in valid_keys + } + return self.request(r, json=payload, reason=reason) + + def delete_role(self, guild_id, role_id, *, reason=None): + r = Route('DELETE', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id) + return self.request(r, reason=reason) + + def replace_roles(self, user_id, guild_id, role_ids, *, reason=None): + return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids, reason=reason) + + def create_role(self, guild_id, *, reason=None, **fields): + r = Route('POST', '/guilds/{guild_id}/roles', guild_id=guild_id) + return self.request(r, json=fields, reason=reason) + + def move_role_position(self, guild_id, positions, *, reason=None): + r = Route('PATCH', '/guilds/{guild_id}/roles', guild_id=guild_id) + return self.request(r, json=positions, reason=reason) + + def add_role(self, guild_id, user_id, role_id, *, reason=None): + r = Route('PUT', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}', + guild_id=guild_id, user_id=user_id, role_id=role_id) + return self.request(r, reason=reason) + + def remove_role(self, guild_id, user_id, role_id, *, reason=None): + r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}', + guild_id=guild_id, user_id=user_id, role_id=role_id) + return self.request(r, reason=reason) + + def edit_channel_permissions(self, channel_id, target, allow, deny, type, *, reason=None): + payload = { + 'id': target, + 'allow': allow, + 'deny': deny, + 'type': type + } + r = Route('PUT', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target) + return self.request(r, json=payload, reason=reason) + + def delete_channel_permissions(self, channel_id, target, *, reason=None): + r = Route('DELETE', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target) + return self.request(r, reason=reason) + + # Voice management + + def move_member(self, user_id, guild_id, channel_id, *, reason=None): + return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id, reason=reason) + + # Relationship related + + def remove_relationship(self, user_id): + r = Route('DELETE', '/users/@me/relationships/{user_id}', user_id=user_id) + return self.request(r) + + def add_relationship(self, user_id, type=None): + r = Route('PUT', '/users/@me/relationships/{user_id}', user_id=user_id) + payload = {} + if type is not None: + payload['type'] = type + + return self.request(r, json=payload) + + def send_friend_request(self, username, discriminator): + r = Route('POST', '/users/@me/relationships') + payload = { + 'username': username, + 'discriminator': int(discriminator) + } + return self.request(r, json=payload) + + # Misc + + def application_info(self): + return self.request(Route('GET', '/oauth2/applications/@me')) + + async def get_gateway(self, *, encoding='json', v=6, zlib=True): + try: + data = await self.request(Route('GET', '/gateway')) + except HTTPException as exc: + raise GatewayNotFound() from exc + if zlib: + value = '{0}?encoding={1}&v={2}&compress=zlib-stream' + else: + value = '{0}?encoding={1}&v={2}' + return value.format(data['url'], encoding, v) + + async def get_bot_gateway(self, *, encoding='json', v=6, zlib=True): + try: + data = await self.request(Route('GET', '/gateway/bot')) + 
except HTTPException as exc: + raise GatewayNotFound() from exc + + if zlib: + value = '{0}?encoding={1}&v={2}&compress=zlib-stream' + else: + value = '{0}?encoding={1}&v={2}' + return data['shards'], value.format(data['url'], encoding, v) + + def get_user(self, user_id): + return self.request(Route('GET', '/users/{user_id}', user_id=user_id)) + + def get_user_profile(self, user_id): + return self.request(Route('GET', '/users/{user_id}/profile', user_id=user_id)) + + def get_mutual_friends(self, user_id): + return self.request(Route('GET', '/users/{user_id}/relationships', user_id=user_id)) + + def change_hypesquad_house(self, house_id): + payload = {'house_id': house_id} + return self.request(Route('POST', '/hypesquad/online'), json=payload) + + def leave_hypesquad_house(self): + return self.request(Route('DELETE', '/hypesquad/online')) + + def edit_settings(self, **payload): + return self.request(Route('PATCH', '/users/@me/settings'), json=payload) diff --git a/venv/lib/python3.7/site-packages/discord/invite.py b/venv/lib/python3.7/site-packages/discord/invite.py new file mode 100644 index 0000000..d91d06e --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/invite.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .asset import Asset +from .utils import parse_time, snowflake_time +from .mixins import Hashable +from .enums import ChannelType, VerificationLevel, try_enum +from collections import namedtuple + +class PartialInviteChannel(namedtuple('PartialInviteChannel', 'id name type')): + """Represents a "partial" invite channel. + + This model will be given when the user is not part of the + guild the :class:`Invite` resolves to. + + .. container:: operations + + .. describe:: x == y + + Checks if two partial channels are the same. + + .. describe:: x != y + + Checks if two partial channels are not the same. + + .. describe:: hash(x) + + Return the partial channel's hash. + + .. describe:: str(x) + + Returns the partial channel's name. + + Attributes + ----------- + name: :class:`str` + The partial channel's name. + id: :class:`int` + The partial channel's ID. + type: :class:`ChannelType` + The partial channel's type. 
+ """ + + __slots__ = () + + def __str__(self): + return self.name + + @property + def mention(self): + """:class:`str`: The string that allows you to mention the channel.""" + return '<#%s>' % self.id + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" + return snowflake_time(self.id) + +class PartialInviteGuild: + """Represents a "partial" invite guild. + + This model will be given when the user is not part of the + guild the :class:`Invite` resolves to. + + .. container:: operations + + .. describe:: x == y + + Checks if two partial guilds are the same. + + .. describe:: x != y + + Checks if two partial guilds are not the same. + + .. describe:: hash(x) + + Return the partial guild's hash. + + .. describe:: str(x) + + Returns the partial guild's name. + + Attributes + ----------- + name: :class:`str` + The partial guild's name. + id: :class:`int` + The partial guild's ID. + verification_level: :class:`VerificationLevel` + The partial guild's verification level. + features: List[:class:`str`] + A list of features the guild has. See :attr:`Guild.features` for more information. + icon: Optional[:class:`str`] + The partial guild's icon. + banner: Optional[:class:`str`] + The partial guild's banner. + splash: Optional[:class:`str`] + The partial guild's invite splash. + description: Optional[:class:`str`] + The partial guild's description. + """ + + __slots__ = ('_state', 'features', 'icon', 'banner', 'id', 'name', 'splash', + 'verification_level', 'description') + + def __init__(self, state, data, id): + self._state = state + self.id = id + self.name = data['name'] + self.features = data.get('features', []) + self.icon = data.get('icon') + self.banner = data.get('banner') + self.splash = data.get('splash') + self.verification_level = try_enum(VerificationLevel, data.get('verification_level')) + self.description = data.get('description') + + def __str__(self): + return self.name + + def __repr__(self): + return '<{0.__class__.__name__} id={0.id} name={0.name!r} features={0.features} ' \ + 'description={0.description!r}>'.format(self) + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the guild's creation time in UTC.""" + return snowflake_time(self.id) + + @property + def icon_url(self): + """:class:`Asset`: Returns the guild's icon asset.""" + return self.icon_url_as() + + def icon_url_as(self, *, format='webp', size=1024): + """The same operation as :meth:`Guild.icon_url_as`.""" + return Asset._from_guild_image(self._state, self.id, self.icon, 'icons', format=format, size=size) + + @property + def banner_url(self): + """:class:`Asset`: Returns the guild's banner asset.""" + return self.banner_url_as() + + def banner_url_as(self, *, format='webp', size=2048): + """The same operation as :meth:`Guild.banner_url_as`.""" + return Asset._from_guild_image(self._state, self.id, self.banner, 'banners', format=format, size=size) + + @property + def splash_url(self): + """:class:`Asset`: Returns the guild's invite splash asset.""" + return self.splash_url_as() + + def splash_url_as(self, *, format='webp', size=2048): + """The same operation as :meth:`Guild.splash_url_as`.""" + return Asset._from_guild_image(self._state, self.id, self.splash, 'splashes', format=format, size=size) + +class Invite(Hashable): + r"""Represents a Discord :class:`Guild` or :class:`abc.GuildChannel` invite. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + .. 
container:: operations + + .. describe:: x == y + + Checks if two invites are equal. + + .. describe:: x != y + + Checks if two invites are not equal. + + .. describe:: hash(x) + + Returns the invite hash. + + .. describe:: str(x) + + Returns the invite URL. + + The following table illustrates what methods will obtain the attributes: + + +------------------------------------+----------------------------------------------------------+ + | Attribute | Method | + +====================================+==========================================================+ + | :attr:`max_age` | :meth:`abc.GuildChannel.invites`\, :meth:`Guild.invites` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`max_uses` | :meth:`abc.GuildChannel.invites`\, :meth:`Guild.invites` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`created_at` | :meth:`abc.GuildChannel.invites`\, :meth:`Guild.invites` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`temporary` | :meth:`abc.GuildChannel.invites`\, :meth:`Guild.invites` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`uses` | :meth:`abc.GuildChannel.invites`\, :meth:`Guild.invites` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`approximate_member_count` | :meth:`Client.fetch_invite` | + +------------------------------------+----------------------------------------------------------+ + | :attr:`approximate_presence_count` | :meth:`Client.fetch_invite` | + +------------------------------------+----------------------------------------------------------+ + + If it's not in the table above then it is available by all methods. + + Attributes + ----------- + max_age: :class:`int` + How long the before the invite expires in seconds. A value of 0 indicates that it doesn't expire. + code: :class:`str` + The URL fragment used for the invite. + guild: Union[:class:`Guild`, :class:`PartialInviteGuild`] + The guild the invite is for. + revoked: :class:`bool` + Indicates if the invite has been revoked. + created_at: :class:`datetime.datetime` + A datetime object denoting the time the invite was created. + temporary: :class:`bool` + Indicates that the invite grants temporary membership. + If ``True``, members who joined via this invite will be kicked upon disconnect. + uses: :class:`int` + How many times the invite has been used. + max_uses: :class:`int` + How many times the invite can be used. + inviter: :class:`User` + The user who created the invite. + approximate_member_count: Optional[:class:`int`] + The approximate number of members in the guild. + approximate_presence_count: Optional[:class:`int`] + The approximate number of members currently active in the guild. + This includes idle, dnd, online, and invisible members. Offline members are excluded. + channel: Union[:class:`abc.GuildChannel`, :class:`PartialInviteChannel`] + The channel the invite is for. 
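
A short usage sketch for this class, assuming a connected discord.Client called bot (the invite code is made up):

import discord

async def inspect_invite(bot: discord.Client):
    invite = await bot.fetch_invite("discord.gg/example")
    # guild and channel are PartialInviteGuild / PartialInviteChannel when
    # the bot is not a member of the guild the invite resolves to.
    print(invite.url, invite.guild, invite.channel)
    # the approximate_* counters are only filled in by Client.fetch_invite,
    # as the table above notes.
    print(invite.approximate_member_count, invite.approximate_presence_count)
    # revoking needs the manage_channels permission.
    await invite.delete(reason="rotating invites")
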
+ """ + + __slots__ = ('max_age', 'code', 'guild', 'revoked', 'created_at', 'uses', + 'temporary', 'max_uses', 'inviter', 'channel', '_state', + 'approximate_member_count', 'approximate_presence_count' ) + + def __init__(self, *, state, data): + self._state = state + self.max_age = data.get('max_age') + self.code = data.get('code') + self.guild = data.get('guild') + self.revoked = data.get('revoked') + self.created_at = parse_time(data.get('created_at')) + self.temporary = data.get('temporary') + self.uses = data.get('uses') + self.max_uses = data.get('max_uses') + self.approximate_presence_count = data.get('approximate_presence_count') + self.approximate_member_count = data.get('approximate_member_count') + + inviter_data = data.get('inviter') + self.inviter = None if inviter_data is None else self._state.store_user(inviter_data) + self.channel = data.get('channel') + + @classmethod + def from_incomplete(cls, *, state, data): + guild_id = int(data['guild']['id']) + channel_id = int(data['channel']['id']) + guild = state._get_guild(guild_id) + if guild is not None: + channel = guild.get_channel(channel_id) + else: + channel_data = data['channel'] + guild_data = data['guild'] + channel_type = try_enum(ChannelType, channel_data['type']) + channel = PartialInviteChannel(id=channel_id, name=channel_data['name'], type=channel_type) + guild = PartialInviteGuild(state, guild_data, guild_id) + data['guild'] = guild + data['channel'] = channel + return cls(state=state, data=data) + + def __str__(self): + return self.url + + def __repr__(self): + return ''.format(self) + + def __hash__(self): + return hash(self.code) + + @property + def id(self): + """:class:`str`: Returns the proper code portion of the invite.""" + return self.code + + @property + def url(self): + """:class:`str`: A property that retrieves the invite URL.""" + return 'http://discord.gg/' + self.code + + async def delete(self, *, reason=None): + """|coro| + + Revokes the instant invite. + + You must have the :attr:`~Permissions.manage_channels` permission to do this. + + Parameters + ----------- + reason: Optional[:class:`str`] + The reason for deleting this invite. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have permissions to revoke invites. + NotFound + The invite is invalid or expired. + HTTPException + Revoking the invite failed. + """ + + await self._state.http.delete_invite(self.code, reason=reason) diff --git a/venv/lib/python3.7/site-packages/discord/iterators.py b/venv/lib/python3.7/site-packages/discord/iterators.py new file mode 100644 index 0000000..17c11fb --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/iterators.py @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import datetime + +from .errors import NoMoreItems +from .utils import DISCORD_EPOCH, time_snowflake, maybe_coroutine +from .object import Object +from .audit_logs import AuditLogEntry + +OLDEST_OBJECT = Object(id=0) + +class _AsyncIterator: + __slots__ = () + + def get(self, **attrs): + def predicate(elem): + for attr, val in attrs.items(): + nested = attr.split('__') + obj = elem + for attribute in nested: + obj = getattr(obj, attribute) + + if obj != val: + return False + return True + + return self.find(predicate) + + async def find(self, predicate): + while True: + try: + elem = await self.next() + except NoMoreItems: + return None + + ret = await maybe_coroutine(predicate, elem) + if ret: + return elem + + def map(self, func): + return _MappedAsyncIterator(self, func) + + def filter(self, predicate): + return _FilteredAsyncIterator(self, predicate) + + async def flatten(self): + ret = [] + while True: + try: + item = await self.next() + except NoMoreItems: + return ret + else: + ret.append(item) + + def __aiter__(self): + return self + + async def __anext__(self): + try: + msg = await self.next() + except NoMoreItems: + raise StopAsyncIteration() + else: + return msg + +def _identity(x): + return x + +class _MappedAsyncIterator(_AsyncIterator): + def __init__(self, iterator, func): + self.iterator = iterator + self.func = func + + async def next(self): + # this raises NoMoreItems and will propagate appropriately + item = await self.iterator.next() + return await maybe_coroutine(self.func, item) + +class _FilteredAsyncIterator(_AsyncIterator): + def __init__(self, iterator, predicate): + self.iterator = iterator + + if predicate is None: + predicate = _identity + + self.predicate = predicate + + async def next(self): + getter = self.iterator.next + pred = self.predicate + while True: + # propagate NoMoreItems similar to _MappedAsyncIterator + item = await getter() + ret = await maybe_coroutine(pred, item) + if ret: + return item + +class ReactionIterator(_AsyncIterator): + def __init__(self, message, emoji, limit=100, after=None): + self.message = message + self.limit = limit + self.after = after + state = message._state + self.getter = state.http.get_reaction_users + self.state = state + self.emoji = emoji + self.guild = message.guild + self.channel_id = message.channel.id + self.users = asyncio.Queue(loop=state.loop) + + async def next(self): + if self.users.empty(): + await self.fill_users() + + try: + return self.users.get_nowait() + except asyncio.QueueEmpty: + raise NoMoreItems() + + async def fill_users(self): + # this is a hack because >circular imports< + from .user import User + + if self.limit > 0: + retrieve = self.limit if self.limit <= 100 else 100 + + after = self.after.id if self.after else None + data = await self.getter(self.channel_id, self.message.id, self.emoji, retrieve, after=after) + + if data: + self.limit -= retrieve + self.after = Object(id=int(data[-1]['id'])) + + if self.guild is None: + for element in reversed(data): + await self.users.put(User(state=self.state, 
data=element)) + else: + for element in reversed(data): + member_id = int(element['id']) + member = self.guild.get_member(member_id) + if member is not None: + await self.users.put(member) + else: + await self.users.put(User(state=self.state, data=element)) + +class HistoryIterator(_AsyncIterator): + """Iterator for receiving a channel's message history. + + The messages endpoint has two behaviours we care about here: + If `before` is specified, the messages endpoint returns the `limit` + newest messages before `before`, sorted with newest first. For filling over + 100 messages, update the `before` parameter to the oldest message received. + Messages will be returned in order by time. + If `after` is specified, it returns the `limit` oldest messages after + `after`, sorted with newest first. For filling over 100 messages, update the + `after` parameter to the newest message received. If messages are not + reversed, they will be out of order (99-0, 199-100, so on) + + A note that if both before and after are specified, before is ignored by the + messages endpoint. + + Parameters + ----------- + messageable: :class:`abc.Messageable` + Messageable class to retrieve message history from. + limit: :class:`int` + Maximum number of messages to retrieve + before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Message before which all messages must be. + after: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Message after which all messages must be. + around: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Message around which all messages must be. Limit max 101. Note that if + limit is an even number, this will return at most limit+1 messages. + oldest_first: Optional[:class:`bool`] + If set to ``True``, return messages in oldest->newest order. Defaults to + True if ``after`` is specified, otherwise ``False``. 
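
In practice this iterator is obtained through abc.Messageable.history rather than constructed by hand. A sketch of the common patterns, assuming the channel, bot and checkpoint message are supplied by the caller:

import discord

async def scan(channel: discord.TextChannel, bot: discord.Client, checkpoint: discord.Message):
    # newest -> oldest (the default), at most 200 messages
    async for message in channel.history(limit=200):
        print(message.author, message.content)

    # everything after a checkpoint, oldest -> newest, gathered into a list
    backlog = await channel.history(limit=None, after=checkpoint, oldest_first=True).flatten()

    # the _AsyncIterator helpers (filter/map/get/find) work on any of these iterators
    mine = await channel.history(limit=500).filter(lambda m: m.author == bot.user).flatten()
    first_pin = await channel.history(limit=500).get(pinned=True)
    return backlog, mine, first_pin
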
+ """ + + def __init__(self, messageable, limit, + before=None, after=None, around=None, oldest_first=None): + + if isinstance(before, datetime.datetime): + before = Object(id=time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(id=time_snowflake(after, high=True)) + if isinstance(around, datetime.datetime): + around = Object(id=time_snowflake(around)) + + if oldest_first is None: + self.reverse = after is not None + else: + self.reverse = oldest_first + + self.messageable = messageable + self.limit = limit + self.before = before + self.after = after or OLDEST_OBJECT + self.around = around + + self._filter = None # message dict -> bool + + self.state = self.messageable._state + self.logs_from = self.state.http.logs_from + self.messages = asyncio.Queue(loop=self.state.loop) + + if self.around: + if self.limit is None: + raise ValueError('history does not support around with limit=None') + if self.limit > 101: + raise ValueError("history max limit 101 when specifying around parameter") + elif self.limit == 101: + self.limit = 100 # Thanks discord + elif self.limit == 1: + raise ValueError("Use fetch_message.") + + self._retrieve_messages = self._retrieve_messages_around_strategy + if self.before and self.after: + self._filter = lambda m: self.after.id < int(m['id']) < self.before.id + elif self.before: + self._filter = lambda m: int(m['id']) < self.before.id + elif self.after: + self._filter = lambda m: self.after.id < int(m['id']) + else: + if self.reverse: + self._retrieve_messages = self._retrieve_messages_after_strategy + if (self.before): + self._filter = lambda m: int(m['id']) < self.before.id + else: + self._retrieve_messages = self._retrieve_messages_before_strategy + if (self.after and self.after != OLDEST_OBJECT): + self._filter = lambda m: int(m['id']) > self.after.id + + async def next(self): + if self.messages.empty(): + await self.fill_messages() + + try: + return self.messages.get_nowait() + except asyncio.QueueEmpty: + raise NoMoreItems() + + def _get_retrieve(self): + l = self.limit + if l is None: + r = 100 + elif l <= 100: + r = l + else: + r = 100 + + self.retrieve = r + return r > 0 + + async def flatten(self): + # this is similar to fill_messages except it uses a list instead + # of a queue to place the messages in. 
+ result = [] + channel = await self.messageable._get_channel() + self.channel = channel + while self._get_retrieve(): + data = await self._retrieve_messages(self.retrieve) + if len(data) < 100: + self.limit = 0 # terminate the infinite loop + + if self.reverse: + data = reversed(data) + if self._filter: + data = filter(self._filter, data) + + for element in data: + result.append(self.state.create_message(channel=channel, data=element)) + return result + + async def fill_messages(self): + if not hasattr(self, 'channel'): + # do the required set up + channel = await self.messageable._get_channel() + self.channel = channel + + if self._get_retrieve(): + data = await self._retrieve_messages(self.retrieve) + if len(data) < 100: + self.limit = 0 # terminate the infinite loop + + if self.reverse: + data = reversed(data) + if self._filter: + data = filter(self._filter, data) + + channel = self.channel + for element in data: + await self.messages.put(self.state.create_message(channel=channel, data=element)) + + async def _retrieve_messages(self, retrieve): + """Retrieve messages and update next parameters.""" + pass + + async def _retrieve_messages_before_strategy(self, retrieve): + """Retrieve messages using before parameter.""" + before = self.before.id if self.before else None + data = await self.logs_from(self.channel.id, retrieve, before=before) + if len(data): + if self.limit is not None: + self.limit -= retrieve + self.before = Object(id=int(data[-1]['id'])) + return data + + async def _retrieve_messages_after_strategy(self, retrieve): + """Retrieve messages using after parameter.""" + after = self.after.id if self.after else None + data = await self.logs_from(self.channel.id, retrieve, after=after) + if len(data): + if self.limit is not None: + self.limit -= retrieve + self.after = Object(id=int(data[0]['id'])) + return data + + async def _retrieve_messages_around_strategy(self, retrieve): + """Retrieve messages using around parameter.""" + if self.around: + around = self.around.id if self.around else None + data = await self.logs_from(self.channel.id, retrieve, around=around) + self.around = None + return data + return [] + +class AuditLogIterator(_AsyncIterator): + def __init__(self, guild, limit=None, before=None, after=None, oldest_first=None, user_id=None, action_type=None): + if isinstance(before, datetime.datetime): + before = Object(id=time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(id=time_snowflake(after, high=True)) + + + if oldest_first is None: + self.reverse = after is not None + else: + self.reverse = oldest_first + + self.guild = guild + self.loop = guild._state.loop + self.request = guild._state.http.get_audit_logs + self.limit = limit + self.before = before + self.user_id = user_id + self.action_type = action_type + self.after = OLDEST_OBJECT + self._users = {} + self._state = guild._state + + + self._filter = None # entry dict -> bool + + self.entries = asyncio.Queue(loop=self.loop) + + + if self.reverse: + self._strategy = self._after_strategy + if self.before: + self._filter = lambda m: int(m['id']) < self.before.id + else: + self._strategy = self._before_strategy + if self.after and self.after != OLDEST_OBJECT: + self._filter = lambda m: int(m['id']) > self.after.id + + async def _before_strategy(self, retrieve): + before = self.before.id if self.before else None + data = await self.request(self.guild.id, limit=retrieve, user_id=self.user_id, + action_type=self.action_type, before=before) + + entries = 
data.get('audit_log_entries', []) + if len(data) and entries: + if self.limit is not None: + self.limit -= retrieve + self.before = Object(id=int(entries[-1]['id'])) + return data.get('users', []), entries + + async def _after_strategy(self, retrieve): + after = self.after.id if self.after else None + data = await self.request(self.guild.id, limit=retrieve, user_id=self.user_id, + action_type=self.action_type, after=after) + entries = data.get('audit_log_entries', []) + if len(data) and entries: + if self.limit is not None: + self.limit -= retrieve + self.after = Object(id=int(entries[0]['id'])) + return data.get('users', []), entries + + async def next(self): + if self.entries.empty(): + await self._fill() + + try: + return self.entries.get_nowait() + except asyncio.QueueEmpty: + raise NoMoreItems() + + def _get_retrieve(self): + l = self.limit + if l is None: + r = 100 + elif l <= 100: + r = l + else: + r = 100 + + self.retrieve = r + return r > 0 + + async def _fill(self): + from .user import User + + if self._get_retrieve(): + users, data = await self._strategy(self.retrieve) + if len(data) < 100: + self.limit = 0 # terminate the infinite loop + + if self.reverse: + data = reversed(data) + if self._filter: + data = filter(self._filter, data) + + for user in users: + u = User(data=user, state=self._state) + self._users[u.id] = u + + for element in data: + # TODO: remove this if statement later + if element['action_type'] is None: + continue + + await self.entries.put(AuditLogEntry(data=element, users=self._users, guild=self.guild)) + + +class GuildIterator(_AsyncIterator): + """Iterator for receiving the client's guilds. + + The guilds endpoint has the same two behaviours as described + in :class:`HistoryIterator`: + If `before` is specified, the guilds endpoint returns the `limit` + newest guilds before `before`, sorted with newest first. For filling over + 100 guilds, update the `before` parameter to the oldest guild received. + Guilds will be returned in order by time. + If `after` is specified, it returns the `limit` oldest guilds after `after`, + sorted with newest first. For filling over 100 guilds, update the `after` + parameter to the newest guild received, If guilds are not reversed, they + will be out of order (99-0, 199-100, so on) + + Not that if both before and after are specified, before is ignored by the + guilds endpoint. + + Parameters + ----------- + bot: :class:`discord.Client` + The client to retrieve the guilds from. + limit: :class:`int` + Maximum number of guilds to retrieve. + before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Object before which all guilds must be. + after: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] + Object after which all guilds must be. 
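
Both of these iterators are normally reached through the public API (Guild.audit_logs and Client.fetch_guilds). A sketch, with the guild, moderator and bot assumed to come from the caller:

import discord

async def recent_bans(guild: discord.Guild, moderator: discord.Member):
    # AuditLogIterator via Guild.audit_logs
    async for entry in guild.audit_logs(limit=50, user=moderator,
                                        action=discord.AuditLogAction.ban):
        print(entry.target, entry.reason, entry.created_at)

async def list_guilds(bot: discord.Client):
    # GuildIterator via Client.fetch_guilds; the Guild objects are built
    # from the REST payload rather than the gateway cache.
    async for guild in bot.fetch_guilds(limit=150):
        print(guild.id, guild.name)
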
+ """ + def __init__(self, bot, limit, before=None, after=None): + + if isinstance(before, datetime.datetime): + before = Object(id=time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(id=time_snowflake(after, high=True)) + + self.bot = bot + self.limit = limit + self.before = before + self.after = after + + self._filter = None + + self.state = self.bot._connection + self.get_guilds = self.bot.http.get_guilds + self.guilds = asyncio.Queue(loop=self.state.loop) + + if self.before and self.after: + self._retrieve_guilds = self._retrieve_guilds_before_strategy + self._filter = lambda m: int(m['id']) > self.after.id + elif self.after: + self._retrieve_guilds = self._retrieve_guilds_after_strategy + else: + self._retrieve_guilds = self._retrieve_guilds_before_strategy + + async def next(self): + if self.guilds.empty(): + await self.fill_guilds() + + try: + return self.guilds.get_nowait() + except asyncio.QueueEmpty: + raise NoMoreItems() + + def _get_retrieve(self): + l = self.limit + if l is None: + r = 100 + elif l <= 100: + r = l + else: + r = 100 + + self.retrieve = r + return r > 0 + + def create_guild(self, data): + from .guild import Guild + return Guild(state=self.state, data=data) + + async def flatten(self): + result = [] + while self._get_retrieve(): + data = await self._retrieve_guilds(self.retrieve) + if len(data) < 100: + self.limit = 0 + + if self._filter: + data = filter(self._filter, data) + + for element in data: + result.append(self.create_guild(element)) + return result + + async def fill_guilds(self): + if self._get_retrieve(): + data = await self._retrieve_guilds(self.retrieve) + if self.limit is None or len(data) < 100: + self.limit = 0 + + if self._filter: + data = filter(self._filter, data) + + for element in data: + await self.guilds.put(self.create_guild(element)) + + async def _retrieve_guilds(self, retrieve): + """Retrieve guilds and update next parameters.""" + pass + + async def _retrieve_guilds_before_strategy(self, retrieve): + """Retrieve guilds using before parameter.""" + before = self.before.id if self.before else None + data = await self.get_guilds(retrieve, before=before) + if len(data): + if self.limit is not None: + self.limit -= retrieve + self.before = Object(id=int(data[-1]['id'])) + return data + + async def _retrieve_guilds_after_strategy(self, retrieve): + """Retrieve guilds using after parameter.""" + after = self.after.id if self.after else None + data = await self.get_guilds(retrieve, after=after) + if len(data): + if self.limit is not None: + self.limit -= retrieve + self.after = Object(id=int(data[0]['id'])) + return data diff --git a/venv/lib/python3.7/site-packages/discord/member.py b/venv/lib/python3.7/site-packages/discord/member.py new file mode 100644 index 0000000..266e15d --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/member.py @@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of 
the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import itertools +from operator import attrgetter + +import discord.abc + +from . import utils +from .user import BaseUser, User +from .activity import create_activity +from .permissions import Permissions +from .enums import Status, try_enum +from .colour import Colour +from .object import Object + +class VoiceState: + """Represents a Discord user's voice state. + + Attributes + ------------ + deaf: :class:`bool` + Indicates if the user is currently deafened by the guild. + mute: :class:`bool` + Indicates if the user is currently muted by the guild. + self_mute: :class:`bool` + Indicates if the user is currently muted by their own accord. + self_deaf: :class:`bool` + Indicates if the user is currently deafened by their own accord. + self_video: :class:`bool` + Indicates if the user is currently broadcasting video. + afk: :class:`bool` + Indicates if the user is currently in the AFK channel in the guild. + channel: :class:`VoiceChannel` + The voice channel that the user is currently connected to. None if the user + is not currently in a voice channel. + """ + + __slots__ = ('session_id', 'deaf', 'mute', 'self_mute', + 'self_video', 'self_deaf', 'afk', 'channel') + + def __init__(self, *, data, channel=None): + self.session_id = data.get('session_id') + self._update(data, channel) + + def _update(self, data, channel): + self.self_mute = data.get('self_mute', False) + self.self_deaf = data.get('self_deaf', False) + self.self_video = data.get('self_video', False) + self.afk = data.get('suppress', False) + self.mute = data.get('mute', False) + self.deaf = data.get('deaf', False) + self.channel = channel + + def __repr__(self): + return ''.format(self) + +def flatten_user(cls): + for attr, value in itertools.chain(BaseUser.__dict__.items(), User.__dict__.items()): + # ignore private/special methods + if attr.startswith('_'): + continue + + # don't override what we already have + if attr in cls.__dict__: + continue + + # if it's a slotted attribute or a property, redirect it + # slotted members are implemented as member_descriptors in Type.__dict__ + if not hasattr(value, '__annotations__'): + getter = attrgetter('_user.' + attr) + setattr(cls, attr, property(getter, doc='Equivalent to :attr:`User.%s`' % attr)) + else: + # Technically, this can also use attrgetter + # However I'm not sure how I feel about "functions" returning properties + # It probably breaks something in Sphinx. + # probably a member function by now + def generate_function(x): + def general(self, *args, **kwargs): + return getattr(self._user, x)(*args, **kwargs) + + general.__name__ = x + return general + + func = generate_function(attr) + func.__doc__ = value.__doc__ + setattr(cls, attr, func) + + return cls + +_BaseUser = discord.abc.User + +@flatten_user +class Member(discord.abc.Messageable, _BaseUser): + """Represents a Discord member to a :class:`Guild`. + + This implements a lot of the functionality of :class:`User`. + + .. container:: operations + + .. describe:: x == y + + Checks if two members are equal. 
+ Note that this works with :class:`User` instances too. + + .. describe:: x != y + + Checks if two members are not equal. + Note that this works with :class:`User` instances too. + + .. describe:: hash(x) + + Returns the member's hash. + + .. describe:: str(x) + + Returns the member's name with the discriminator. + + Attributes + ---------- + joined_at: Optional[:class:`datetime.datetime`] + A datetime object that specifies the date and time in UTC that the member joined the guild for + the first time. In certain cases, this can be ``None``. + activities: Tuple[Union[:class:`Game`, :class:`Streaming`, :class:`Spotify`, :class:`Activity`]] + The activities that the user is currently doing. + guild: :class:`Guild` + The guild that the member belongs to. + nick: Optional[:class:`str`] + The guild specific nickname of the user. + premium_since: Optional[:class:`datetime.datetime`] + A datetime object that specifies the date and time in UTC when the member used their + Nitro boost on the guild, if available. This could be ``None``. + """ + + __slots__ = ('_roles', 'joined_at', 'premium_since', '_client_status', 'activities', 'guild', 'nick', '_user', '_state') + + def __init__(self, *, data, guild, state): + self._state = state + self._user = state.store_user(data['user']) + self.guild = guild + self.joined_at = utils.parse_time(data.get('joined_at')) + self.premium_since = utils.parse_time(data.get('premium_since')) + self._update_roles(data) + self._client_status = { + None: 'offline' + } + self.activities = tuple(map(create_activity, data.get('activities', []))) + self.nick = data.get('nick', None) + + def __str__(self): + return str(self._user) + + def __repr__(self): + return ''.format(self, self._user) + + def __eq__(self, other): + return isinstance(other, _BaseUser) and other.id == self.id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self._user) + + @classmethod + def _from_message(cls, *, message, data): + author = message.author + data['user'] = { + attr: getattr(author, attr) + for attr in author.__slots__ + if attr[0] != '_' + } + return cls(data=data, guild=message.guild, state=message._state) + + @classmethod + def _from_presence_update(cls, *, data, guild, state): + clone = cls(data=data, guild=guild, state=state) + to_return = cls(data=data, guild=guild, state=state) + to_return._client_status = { + key: value + for key, value in data.get('client_status', {}).items() + } + to_return._client_status[None] = data['status'] + return to_return, clone + + @classmethod + def _copy(cls, member): + self = cls.__new__(cls) # to bypass __init__ + + self._roles = utils.SnowflakeList(member._roles, is_sorted=True) + self.joined_at = member.joined_at + self.premium_since = member.premium_since + self._client_status = member._client_status.copy() + self.guild = member.guild + self.nick = member.nick + self.activities = member.activities + self._state = member._state + + # Reference will not be copied unless necessary by PRESENCE_UPDATE + # See below + self._user = member._user + return self + + async def _get_channel(self): + ch = await self.create_dm() + return ch + + def _update_roles(self, data): + self._roles = utils.SnowflakeList(map(int, data['roles'])) + + def _update(self, data): + # the nickname change is optional, + # if it isn't in the payload then it didn't change + try: + self.nick = data['nick'] + except KeyError: + pass + + self.premium_since = utils.parse_time(data.get('premium_since')) + self._update_roles(data) + + def 
_presence_update(self, data, user): + self.activities = tuple(map(create_activity, data.get('activities', []))) + self._client_status = { + key: value + for key, value in data.get('client_status', {}).items() + } + self._client_status[None] = data['status'] + + if len(user) > 1: + u = self._user + original = (u.name, u.avatar, u.discriminator) + # These keys seem to always be available + modified = (user['username'], user['avatar'], user['discriminator']) + if original != modified: + to_return = User._copy(self._user) + u.name, u.avatar, u.discriminator = modified + # Signal to dispatch on_user_update + return to_return, u + return False + + @property + def status(self): + """:class:`Status`: The member's overall status. If the value is unknown, then it will be a :class:`str` instead.""" + return try_enum(Status, self._client_status[None]) + + @status.setter + def status(self, value): + # internal use only + self._client_status[None] = str(value) + + @property + def mobile_status(self): + """:class:`Status`: The member's status on a mobile device, if applicable.""" + return try_enum(Status, self._client_status.get('mobile', 'offline')) + + @property + def desktop_status(self): + """:class:`Status`: The member's status on the desktop client, if applicable.""" + return try_enum(Status, self._client_status.get('desktop', 'offline')) + + @property + def web_status(self): + """:class:`Status`: The member's status on the web client, if applicable.""" + return try_enum(Status, self._client_status.get('web', 'offline')) + + def is_on_mobile(self): + """A helper function that determines if a member is active on a mobile device.""" + return 'mobile' in self._client_status + + @property + def colour(self): + """:class:`Colour`: A property that returns a colour denoting the rendered colour + for the member. If the default colour is the one rendered then an instance + of :meth:`Colour.default` is returned. + + There is an alias for this named :meth:`color`. + """ + + roles = self.roles[1:] # remove @everyone + + # highest order of the colour is the one that gets rendered. + # if the highest is the default colour then the next one with a colour + # is chosen instead + for role in reversed(roles): + if role.colour.value: + return role.colour + return Colour.default() + + @property + def color(self): + """:class:`Colour`: A property that returns a color denoting the rendered color for + the member. If the default color is the one rendered then an instance of :meth:`Colour.default` + is returned. + + There is an alias for this named :meth:`colour`. + """ + return self.colour + + @property + def roles(self): + """List[:class:`Role`]: A :class:`list` of :class:`Role` that the member belongs to. Note + that the first element of this list is always the default '@everyone' + role. + + These roles are sorted by their position in the role hierarchy. + """ + result = [] + g = self.guild + for role_id in self._roles: + role = g.get_role(role_id) + if role: + result.append(role) + result.append(g.default_role) + result.sort() + return result + + @property + def mention(self): + """:class:`str`: Returns a string that allows you to mention the member.""" + if self.nick: + return '<@!%s>' % self.id + return '<@%s>' % self.id + + @property + def display_name(self): + """:class:`str`: Returns the user's display name. + + For regular users this is just their username, but + if they have a guild specific nickname then that + is returned instead. 
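
The read-only properties above, together with the User attributes proxied in by flatten_user, cover most display needs. A small sketch with an assumed member:

import discord

def describe(member: discord.Member) -> str:
    # name, discriminator and avatar_url come from the wrapped User via
    # flatten_user; nick, colour, status and roles are Member-specific.
    roles = ", ".join(role.name for role in member.roles[1:])  # skip @everyone
    status = "on mobile" if member.is_on_mobile() else str(member.status)
    return f"{member.display_name} ({member}) [{status}] colour={member.colour} roles: {roles}"
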
+ """ + return self.nick if self.nick is not None else self.name + + @property + def activity(self): + """Union[:class:`Game`, :class:`Streaming`, :class:`Spotify`, :class:`Activity`]: Returns the primary + activity the user is currently doing. Could be None if no activity is being done. + + .. note:: + + A user may have multiple activities, these can be accessed under :attr:`activities`. + """ + if self.activities: + return self.activities[0] + + def mentioned_in(self, message): + """Checks if the member is mentioned in the specified message. + + Parameters + ----------- + message: :class:`Message` + The message to check if you're mentioned in. + """ + if self._user.mentioned_in(message): + return True + + for role in message.role_mentions: + has_role = utils.get(self.roles, id=role.id) is not None + if has_role: + return True + + return False + + def permissions_in(self, channel): + """An alias for :meth:`abc.GuildChannel.permissions_for`. + + Basically equivalent to: + + .. code-block:: python3 + + channel.permissions_for(self) + + Parameters + ----------- + channel: :class:`Channel` + The channel to check your permissions for. + """ + return channel.permissions_for(self) + + @property + def top_role(self): + """:class:`Role`: Returns the member's highest role. + + This is useful for figuring where a member stands in the role + hierarchy chain. + """ + return self.roles[-1] + + @property + def guild_permissions(self): + """Returns the member's guild permissions. + + This only takes into consideration the guild permissions + and not most of the implied permissions or any of the + channel permission overwrites. For 100% accurate permission + calculation, please use either :meth:`permissions_in` or + :meth:`abc.GuildChannel.permissions_for`. + + This does take into consideration guild ownership and the + administrator implication. + """ + + if self.guild.owner == self: + return Permissions.all() + + base = Permissions.none() + for r in self.roles: + base.value |= r.permissions.value + + if base.administrator: + return Permissions.all() + + return base + + @property + def voice(self): + """Optional[:class:`VoiceState`]: Returns the member's current voice state.""" + return self.guild._voice_state_for(self._user.id) + + async def ban(self, **kwargs): + """|coro| + + Bans this member. Equivalent to :meth:`Guild.ban`. + """ + await self.guild.ban(self, **kwargs) + + async def unban(self, *, reason=None): + """|coro| + + Unbans this member. Equivalent to :meth:`Guild.unban`. + """ + await self.guild.unban(self, reason=reason) + + async def kick(self, *, reason=None): + """|coro| + + Kicks this member. Equivalent to :meth:`Guild.kick`. + """ + await self.guild.kick(self, reason=reason) + + async def edit(self, *, reason=None, **fields): + """|coro| + + Edits the member's data. 
+ + Depending on the parameter passed, this requires different permissions listed below: + + +---------------+--------------------------------------+ + | Parameter | Permission | + +---------------+--------------------------------------+ + | nick | :attr:`Permissions.manage_nicknames` | + +---------------+--------------------------------------+ + | mute | :attr:`Permissions.mute_members` | + +---------------+--------------------------------------+ + | deafen | :attr:`Permissions.deafen_members` | + +---------------+--------------------------------------+ + | roles | :attr:`Permissions.manage_roles` | + +---------------+--------------------------------------+ + | voice_channel | :attr:`Permissions.move_members` | + +---------------+--------------------------------------+ + + All parameters are optional. + + .. versionchanged:: 1.1.0 + Can now pass ``None`` to ``voice_channel`` to kick a member from voice. + + Parameters + ----------- + nick: Optional[:class:`str`] + The member's new nickname. Use ``None`` to remove the nickname. + mute: :class:`bool` + Indicates if the member should be guild muted or un-muted. + deafen: :class:`bool` + Indicates if the member should be guild deafened or un-deafened. + roles: Optional[List[:class:`Role`]] + The member's new list of roles. This *replaces* the roles. + voice_channel: Optional[:class:`VoiceChannel`] + The voice channel to move the member to. + Pass ``None`` to kick them from voice. + reason: Optional[:class:`str`] + The reason for editing this member. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have the proper permissions to the action requested. + HTTPException + The operation failed. + """ + http = self._state.http + guild_id = self.guild.id + payload = {} + + try: + nick = fields['nick'] + except KeyError: + # nick not present so... + pass + else: + nick = nick if nick else '' + if self._state.self_id == self.id: + await http.change_my_nickname(guild_id, nick, reason=reason) + else: + payload['nick'] = nick + + deafen = fields.get('deafen') + if deafen is not None: + payload['deaf'] = deafen + + mute = fields.get('mute') + if mute is not None: + payload['mute'] = mute + + try: + vc = fields['voice_channel'] + except KeyError: + pass + else: + payload['channel_id'] = vc and vc.id + + try: + roles = fields['roles'] + except KeyError: + pass + else: + payload['roles'] = tuple(r.id for r in roles) + + await http.edit_member(guild_id, self.id, reason=reason, **payload) + + # TODO: wait for WS event for modify-in-place behaviour + + async def move_to(self, channel, *, reason=None): + """|coro| + + Moves a member to a new voice channel (they must be connected first). + + You must have the :attr:`~Permissions.move_members` permission to + use this. + + This raises the same exceptions as :meth:`edit`. + + .. versionchanged:: 1.1.0 + Can now pass ``None`` to kick a member from voice. + + Parameters + ----------- + channel: Optional[:class:`VoiceChannel`] + The new voice channel to move the member to. + Pass ``None`` to kick them from voice. + reason: Optional[:class:`str`] + The reason for doing this action. Shows up on the audit log. + """ + await self.edit(voice_channel=channel, reason=reason) + + async def add_roles(self, *roles, reason=None, atomic=True): + r"""|coro| + + Gives the member a number of :class:`Role`\s. + + You must have the :attr:`~Permissions.manage_roles` permission to + use this. 
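
A sketch of the moderation helpers defined in this class, with the member, role and AFK channel assumed to be supplied by the calling cog:

import discord

async def silence(member: discord.Member, muted: discord.Role, afk: discord.VoiceChannel):
    # a single PATCH covering nickname, server mute and a voice move
    await member.edit(nick="muted", mute=True, voice_channel=afk, reason="timeout")
    # atomic=True (the default) sends one request per role;
    # atomic=False rewrites the whole role list through edit(roles=...)
    await member.add_roles(muted, reason="timeout")

async def release(member: discord.Member, muted: discord.Role):
    await member.remove_roles(muted, reason="timeout lifted")
    await member.edit(mute=False, nick=None, reason="timeout lifted")
    await member.move_to(None)  # disconnect from voice (1.1.0 and later)
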
+ + Parameters + ----------- + \*roles: :class:`abc.Snowflake` + An argument list of :class:`abc.Snowflake` representing a :class:`Role` + to give to the member. + reason: Optional[:class:`str`] + The reason for adding these roles. Shows up on the audit log. + atomic: :class:`bool` + Whether to atomically add roles. This will ensure that multiple + operations will always be applied regardless of the current + state of the cache. + + Raises + ------- + Forbidden + You do not have permissions to add these roles. + HTTPException + Adding roles failed. + """ + + if not atomic: + new_roles = utils._unique(Object(id=r.id) for s in (self.roles[1:], roles) for r in s) + await self.edit(roles=new_roles, reason=reason) + else: + req = self._state.http.add_role + guild_id = self.guild.id + user_id = self.id + for role in roles: + await req(guild_id, user_id, role.id, reason=reason) + + async def remove_roles(self, *roles, reason=None, atomic=True): + r"""|coro| + + Removes :class:`Role`\s from this member. + + You must have the :attr:`~Permissions.manage_roles` permission to + use this. + + Parameters + ----------- + \*roles: :class:`abc.Snowflake` + An argument list of :class:`abc.Snowflake` representing a :class:`Role` + to remove from the member. + reason: Optional[:class:`str`] + The reason for removing these roles. Shows up on the audit log. + atomic: :class:`bool` + Whether to atomically remove roles. This will ensure that multiple + operations will always be applied regardless of the current + state of the cache. + + Raises + ------- + Forbidden + You do not have permissions to remove these roles. + HTTPException + Removing the roles failed. + """ + + if not atomic: + new_roles = [Object(id=r.id) for r in self.roles[1:]] # remove @everyone + for role in roles: + try: + new_roles.remove(Object(id=role.id)) + except ValueError: + pass + + await self.edit(roles=new_roles, reason=reason) + else: + req = self._state.http.remove_role + guild_id = self.guild.id + user_id = self.id + for role in roles: + await req(guild_id, user_id, role.id, reason=reason) diff --git a/venv/lib/python3.7/site-packages/discord/message.py b/venv/lib/python3.7/site-packages/discord/message.py new file mode 100644 index 0000000..fdf685e --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/message.py @@ -0,0 +1,918 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import datetime +import re +import io + +from . 
import utils +from .reaction import Reaction +from .emoji import Emoji, PartialEmoji +from .calls import CallMessage +from .enums import MessageType, try_enum +from .errors import InvalidArgument, ClientException, HTTPException +from .embeds import Embed +from .member import Member + +class Attachment: + """Represents an attachment from Discord. + + Attributes + ------------ + id: :class:`int` + The attachment ID. + size: :class:`int` + The attachment size in bytes. + height: Optional[:class:`int`] + The attachment's height, in pixels. Only applicable to images. + width: Optional[:class:`int`] + The attachment's width, in pixels. Only applicable to images. + filename: :class:`str` + The attachment's filename. + url: :class:`str` + The attachment URL. If the message this attachment was attached + to is deleted, then this will 404. + proxy_url: :class:`str` + The proxy URL. This is a cached version of the :attr:`~Attachment.url` in the + case of images. When the message is deleted, this URL might be valid for a few + minutes or not valid at all. + """ + + __slots__ = ('id', 'size', 'height', 'width', 'filename', 'url', 'proxy_url', '_http') + + def __init__(self, *, data, state): + self.id = int(data['id']) + self.size = data['size'] + self.height = data.get('height') + self.width = data.get('width') + self.filename = data['filename'] + self.url = data.get('url') + self.proxy_url = data.get('proxy_url') + self._http = state.http + + def is_spoiler(self): + """:class:`bool`: Whether this attachment contains a spoiler.""" + return self.filename.startswith('SPOILER_') + + def __repr__(self): + return ''.format(self) + + async def save(self, fp, *, seek_begin=True, use_cached=False): + """|coro| + + Saves this attachment into a file-like object. + + Parameters + ----------- + fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`] + The file-like object to save this attachment to or the filename + to use. If a filename is passed then a file is created with that + filename and used instead. + seek_begin: :class:`bool` + Whether to seek to the beginning of the file after saving is + successfully done. + use_cached: :class:`bool` + Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading + the attachment. This will allow attachments to be saved after deletion + more often, compared to the regular URL which is generally deleted right + after the message is deleted. Note that this can still fail to download + deleted attachments if too much time has passed and it does not work + on some types of attachments. + + Raises + -------- + HTTPException + Saving the attachment failed. + NotFound + The attachment was deleted. + + Returns + -------- + :class:`int` + The number of bytes written. + """ + data = await self.read(use_cached=use_cached) + if isinstance(fp, io.IOBase) and fp.writable(): + written = fp.write(data) + if seek_begin: + fp.seek(0) + return written + else: + with open(fp, 'wb') as f: + return f.write(data) + + async def read(self, *, use_cached=False): + """|coro| + + Retrieves the content of this attachment as a :class:`bytes` object. + + .. versionadded:: 1.1.0 + + Parameters + ----------- + use_cached: :class:`bool` + Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading + the attachment. This will allow attachments to be saved after deletion + more often, compared to the regular URL which is generally deleted right + after the message is deleted. 
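
Typical consumption of these helpers from a message handler; the archive path and function name are illustrative:

import io
import discord

async def archive(message: discord.Message) -> list:
    blobs = []
    for attachment in message.attachments:
        if attachment.is_spoiler():
            continue
        # passing a str/PathLike creates the file and writes it for you
        await attachment.save(f"archive/{attachment.id}_{attachment.filename}")
        # read() (added in 1.1.0) returns the raw bytes instead
        blobs.append(io.BytesIO(await attachment.read(use_cached=True)))
    return blobs
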
Note that this can still fail to download + deleted attachments if too much time has passed and it does not work + on some types of attachments. + + Raises + ------ + HTTPException + Downloading the attachment failed. + Forbidden + You do not have permissions to access this attachment + NotFound + The attachment was deleted. + + Returns + ------- + :class:`bytes` + The contents of the attachment. + """ + url = self.proxy_url if use_cached else self.url + data = await self._http.get_from_cdn(url) + return data + +def flatten_handlers(cls): + prefix = len('_handle_') + cls._HANDLERS = { + key[prefix:]: value + for key, value in cls.__dict__.items() + if key.startswith('_handle_') + } + cls._CACHED_SLOTS = [ + attr for attr in cls.__slots__ if attr.startswith('_cs_') + ] + return cls + +@flatten_handlers +class Message: + r"""Represents a message from Discord. + + There should be no need to create one of these manually. + + Attributes + ----------- + tts: :class:`bool` + Specifies if the message was done with text-to-speech. + This can only be accurately received in :func:`on_message` due to + a discord limitation. + type: :class:`MessageType` + The type of message. In most cases this should not be checked, but it is helpful + in cases where it might be a system message for :attr:`system_content`. + author: :class:`abc.User` + A :class:`Member` that sent the message. If :attr:`channel` is a + private channel or the user has the left the guild, then it is a :class:`User` instead. + content: :class:`str` + The actual contents of the message. + nonce + The value used by the discord guild and the client to verify that the message is successfully sent. + This is typically non-important. + embeds: List[:class:`Embed`] + A list of embeds the message has. + channel: Union[:class:`abc.Messageable`] + The :class:`TextChannel` that the message was sent from. + Could be a :class:`DMChannel` or :class:`GroupChannel` if it's a private message. + call: Optional[:class:`CallMessage`] + The call that the message refers to. This is only applicable to messages of type + :attr:`MessageType.call`. + mention_everyone: :class:`bool` + Specifies if the message mentions everyone. + + .. note:: + + This does not check if the ``@everyone`` or the ``@here`` text is in the message itself. + Rather this boolean indicates if either the ``@everyone`` or the ``@here`` text is in the message + **and** it did end up mentioning. + mentions: List[:class:`abc.User`] + A list of :class:`Member` that were mentioned. If the message is in a private message + then the list will be of :class:`User` instead. For messages that are not of type + :attr:`MessageType.default`\, this array can be used to aid in system messages. + For more information, see :attr:`system_content`. + + .. warning:: + + The order of the mentions list is not in any particular order so you should + not rely on it. This is a discord limitation, not one with the library. + channel_mentions: List[:class:`abc.GuildChannel`] + A list of :class:`abc.GuildChannel` that were mentioned. If the message is in a private message + then the list is always empty. + role_mentions: List[:class:`Role`] + A list of :class:`Role` that were mentioned. If the message is in a private message + then the list is always empty. + id: :class:`int` + The message ID. + webhook_id: Optional[:class:`int`] + If this message was sent by a webhook, then this is the webhook ID's that sent this + message. + attachments: List[:class:`Attachment`] + A list of attachments given to a message. 
+ pinned: :class:`bool` + Specifies if the message is currently pinned. + reactions : List[:class:`Reaction`] + Reactions to a message. Reactions can be either custom emoji or standard unicode emoji. + activity: Optional[:class:`dict`] + The activity associated with this message. Sent with Rich-Presence related messages that for + example, request joining, spectating, or listening to or with another member. + + It is a dictionary with the following optional keys: + + - ``type``: An integer denoting the type of message activity being requested. + - ``party_id``: The party ID associated with the party. + application: Optional[:class:`dict`] + The rich presence enabled application associated with this message. + + It is a dictionary with the following keys: + + - ``id``: A string representing the application's ID. + - ``name``: A string representing the application's name. + - ``description``: A string representing the application's description. + - ``icon``: A string representing the icon ID of the application. + - ``cover_image``: A string representing the embed's image asset ID. + """ + + __slots__ = ('_edited_timestamp', 'tts', 'content', 'channel', 'webhook_id', + 'mention_everyone', 'embeds', 'id', 'mentions', 'author', + '_cs_channel_mentions', '_cs_raw_mentions', 'attachments', + '_cs_clean_content', '_cs_raw_channel_mentions', 'nonce', 'pinned', + 'role_mentions', '_cs_raw_role_mentions', 'type', 'call', + '_cs_system_content', '_cs_guild', '_state', 'reactions', + 'application', 'activity') + + def __init__(self, *, state, channel, data): + self._state = state + self.id = int(data['id']) + self.webhook_id = utils._get_as_snowflake(data, 'webhook_id') + self.reactions = [Reaction(message=self, data=d) for d in data.get('reactions', [])] + self.attachments = [Attachment(data=a, state=self._state) for a in data['attachments']] + self.embeds = [Embed.from_dict(a) for a in data['embeds']] + self.application = data.get('application') + self.activity = data.get('activity') + self.channel = channel + self._edited_timestamp = utils.parse_time(data['edited_timestamp']) + self.type = try_enum(MessageType, data['type']) + self.pinned = data['pinned'] + self.mention_everyone = data['mention_everyone'] + self.tts = data['tts'] + self.content = data['content'] + self.nonce = data.get('nonce') + + for handler in ('author', 'member', 'mentions', 'mention_roles', 'call'): + try: + getattr(self, '_handle_%s' % handler)(data[handler]) + except KeyError: + continue + + def __repr__(self): + return ''.format(self) + + def _try_patch(self, data, key, transform=None): + try: + value = data[key] + except KeyError: + pass + else: + if transform is None: + setattr(self, key, value) + else: + setattr(self, key, transform(value)) + + def _add_reaction(self, data, emoji, user_id): + reaction = utils.find(lambda r: r.emoji == emoji, self.reactions) + is_me = data['me'] = user_id == self._state.self_id + + if reaction is None: + reaction = Reaction(message=self, data=data, emoji=emoji) + self.reactions.append(reaction) + else: + reaction.count += 1 + if is_me: + reaction.me = is_me + + return reaction + + def _remove_reaction(self, data, emoji, user_id): + reaction = utils.find(lambda r: r.emoji == emoji, self.reactions) + + if reaction is None: + # already removed? + raise ValueError('Emoji already removed?') + + # if reaction isn't in the list, we crash. 
This means discord + # sent bad data, or we stored improperly + reaction.count -= 1 + + if user_id == self._state.self_id: + reaction.me = False + if reaction.count == 0: + # this raises ValueError if something went wrong as well. + self.reactions.remove(reaction) + + return reaction + + def _update(self, data): + handlers = self._HANDLERS + for key, value in data.items(): + try: + handler = handlers[key] + except KeyError: + continue + else: + handler(self, value) + + # clear the cached properties + for attr in self._CACHED_SLOTS: + try: + delattr(self, attr) + except AttributeError: + pass + + def _handle_edited_timestamp(self, value): + self._edited_timestamp = utils.parse_time(value) + + def _handle_pinned(self, value): + self.pinned = value + + def _handle_application(self, value): + self.application = value + + def _handle_activity(self, value): + self.activity = value + + def _handle_mention_everyone(self, value): + self.mention_everyone = value + + def _handle_tts(self, value): + self.tts = value + + def _handle_type(self, value): + self.type = try_enum(MessageType, value) + + def _handle_content(self, value): + self.content = value + + def _handle_attachments(self, value): + self.attachments = [Attachment(data=a, state=self._state) for a in value] + + def _handle_embeds(self, value): + self.embeds = [Embed.from_dict(data) for data in value] + + def _handle_nonce(self, value): + self.nonce = value + + def _handle_author(self, author): + self.author = self._state.store_user(author) + if self.guild is not None: + found = self.guild.get_member(self.author.id) + if found is not None: + self.author = found + + def _handle_member(self, member): + # The gateway now gives us full Member objects sometimes with the following keys + # deaf, mute, joined_at, roles + # For the sake of performance I'm going to assume that the only + # field that needs *updating* would be the joined_at field. + # If there is no Member object (for some strange reason), then we can upgrade + # ourselves to a more "partial" member object. 
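+        # Note (descriptive comment, inferred from the code above): self.author is
+        # either a cached Member (which has a joined_at attribute) or a plain User;
+        # the AttributeError branch below upgrades a plain User using this partial
+        # member payload.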
+ author = self.author + try: + if author.joined_at is None: + author.joined_at = utils.parse_time(member.get('joined_at')) + except AttributeError: + self.author = Member._from_message(message=self, data=member) + + def _handle_mentions(self, mentions): + self.mentions = [] + if self.guild is None: + self.mentions = [self._state.store_user(m) for m in mentions] + return + + for mention in filter(None, mentions): + id_search = int(mention['id']) + member = self.guild.get_member(id_search) + if member is not None: + self.mentions.append(member) + + def _handle_mention_roles(self, role_mentions): + self.role_mentions = [] + if self.guild is not None: + for role_id in map(int, role_mentions): + role = self.guild.get_role(role_id) + if role is not None: + self.role_mentions.append(role) + + def _handle_call(self, call): + if call is None or self.type is not MessageType.call: + self.call = None + return + + # we get the participant source from the mentions array or + # the author + + participants = [] + for uid in map(int, call.get('participants', [])): + if uid == self.author.id: + participants.append(self.author) + else: + user = utils.find(lambda u: u.id == uid, self.mentions) + if user is not None: + participants.append(user) + + call['participants'] = participants + self.call = CallMessage(message=self, **call) + + @utils.cached_slot_property('_cs_guild') + def guild(self): + """Optional[:class:`Guild`]: The guild that the message belongs to, if applicable.""" + return getattr(self.channel, 'guild', None) + + @utils.cached_slot_property('_cs_raw_mentions') + def raw_mentions(self): + """List[:class:`int`]: A property that returns an array of user IDs matched with + the syntax of ``<@user_id>`` in the message content. + + This allows you to receive the user IDs of mentioned users + even in a private message context. + """ + return [int(x) for x in re.findall(r'<@!?([0-9]+)>', self.content)] + + @utils.cached_slot_property('_cs_raw_channel_mentions') + def raw_channel_mentions(self): + """List[:class:`int`]: A property that returns an array of channel IDs matched with + the syntax of ``<#channel_id>`` in the message content. + """ + return [int(x) for x in re.findall(r'<#([0-9]+)>', self.content)] + + @utils.cached_slot_property('_cs_raw_role_mentions') + def raw_role_mentions(self): + """List[:class:`int`]: A property that returns an array of role IDs matched with + the syntax of ``<@&role_id>`` in the message content. + """ + return [int(x) for x in re.findall(r'<@&([0-9]+)>', self.content)] + + @utils.cached_slot_property('_cs_channel_mentions') + def channel_mentions(self): + if self.guild is None: + return [] + it = filter(None, map(self.guild.get_channel, self.raw_channel_mentions)) + return utils._unique(it) + + @utils.cached_slot_property('_cs_clean_content') + def clean_content(self): + """A property that returns the content in a "cleaned up" + manner. This basically means that mentions are transformed + into the way the client shows it. e.g. ``<#id>`` will transform + into ``#name``. + + This will also transform @everyone and @here mentions into + non-mentions. + + .. note:: + + This *does not* escape markdown. If you want to escape + markdown then use :func:`utils.escape_markdown` along + with this function. 
+ """ + + transformations = { + re.escape('<#%s>' % channel.id): '#' + channel.name + for channel in self.channel_mentions + } + + mention_transforms = { + re.escape('<@%s>' % member.id): '@' + member.display_name + for member in self.mentions + } + + # add the <@!user_id> cases as well.. + second_mention_transforms = { + re.escape('<@!%s>' % member.id): '@' + member.display_name + for member in self.mentions + } + + transformations.update(mention_transforms) + transformations.update(second_mention_transforms) + + if self.guild is not None: + role_transforms = { + re.escape('<@&%s>' % role.id): '@' + role.name + for role in self.role_mentions + } + transformations.update(role_transforms) + + def repl(obj): + return transformations.get(re.escape(obj.group(0)), '') + + pattern = re.compile('|'.join(transformations.keys())) + result = pattern.sub(repl, self.content) + + transformations = { + '@everyone': '@\u200beveryone', + '@here': '@\u200bhere' + } + + def repl2(obj): + return transformations.get(obj.group(0), '') + + pattern = re.compile('|'.join(transformations.keys())) + return pattern.sub(repl2, result) + + @property + def created_at(self): + """:class:`datetime.datetime`: The message's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @property + def edited_at(self): + """Optional[:class:`datetime.datetime`]: A naive UTC datetime object containing the edited time of the message.""" + return self._edited_timestamp + + @property + def jump_url(self): + """:class:`str`: Returns a URL that allows the client to jump to this message.""" + guild_id = getattr(self.guild, 'id', '@me') + return 'https://discordapp.com/channels/{0}/{1.channel.id}/{1.id}'.format(guild_id, self) + + @utils.cached_slot_property('_cs_system_content') + def system_content(self): + r"""A property that returns the content that is rendered + regardless of the :attr:`Message.type`. + + In the case of :attr:`MessageType.default`\, this just returns the + regular :attr:`Message.content`. Otherwise this returns an English + message denoting the contents of the system message. + """ + + if self.type is MessageType.default: + return self.content + + if self.type is MessageType.pins_add: + return '{0.name} pinned a message to this channel.'.format(self.author) + + if self.type is MessageType.recipient_add: + return '{0.name} added {1.name} to the group.'.format(self.author, self.mentions[0]) + + if self.type is MessageType.recipient_remove: + return '{0.name} removed {1.name} from the group.'.format(self.author, self.mentions[0]) + + if self.type is MessageType.channel_name_change: + return '{0.author.name} changed the channel name: {0.content}'.format(self) + + if self.type is MessageType.channel_icon_change: + return '{0.author.name} changed the channel icon.'.format(self) + + if self.type is MessageType.new_member: + formats = [ + "{0} just joined the server - glhf!", + "{0} just joined. Everyone, look busy!", + "{0} just joined. Can I get a heal?", + "{0} joined your party.", + "{0} joined. You must construct additional pylons.", + "Ermagherd. {0} is here.", + "Welcome, {0}. Stay awhile and listen.", + "Welcome, {0}. We were expecting you ( ͡° ͜ʖ ͡°)", + "Welcome, {0}. We hope you brought pizza.", + "Welcome {0}. Leave your weapons by the door.", + "A wild {0} appeared.", + "Swoooosh. {0} just landed.", + "Brace yourselves. {0} just joined the server.", + "{0} just joined... or did they?", + "{0} just arrived. 
Seems OP - please nerf.", + "{0} just slid into the server.", + "A {0} has spawned in the server.", + "Big {0} showed up!", + "Where’s {0}? In the server!", + "{0} hopped into the server. Kangaroo!!", + "{0} just showed up. Hold my beer.", + "Challenger approaching - {0} has appeared!", + "It's a bird! It's a plane! Nevermind, it's just {0}.", + "It's {0}! Praise the sun! \\[T]/", + "Never gonna give {0} up. Never gonna let {0} down.", + "{0} has joined the battle bus.", + "Cheers, love! {0}'s here!", + "Hey! Listen! {0} has joined!", + "We've been expecting you {0}", + "It's dangerous to go alone, take {0}!", + "{0} has joined the server! It's super effective!", + "Cheers, love! {0} is here!", + "{0} is here, as the prophecy foretold.", + "{0} has arrived. Party's over.", + "Ready player {0}", + "{0} is here to kick butt and chew bubblegum. And {0} is all out of gum.", + "Hello. Is it {0} you're looking for?", + "{0} has joined. Stay a while and listen!", + "Roses are red, violets are blue, {0} joined this server with you", + ] + + # manually reconstruct the epoch with millisecond precision, because + # datetime.datetime.timestamp() doesn't return the exact posix + # timestamp with the precision that we need + created_at_ms = int((self.created_at - datetime.datetime(1970, 1, 1)).total_seconds() * 1000) + return formats[created_at_ms % len(formats)].format(self.author.name) + + if self.type is MessageType.call: + # we're at the call message type now, which is a bit more complicated. + # we can make the assumption that Message.channel is a PrivateChannel + # with the type ChannelType.group or ChannelType.private + call_ended = self.call.ended_timestamp is not None + + if self.channel.me in self.call.participants: + return '{0.author.name} started a call.'.format(self) + elif call_ended: + return 'You missed a call from {0.author.name}'.format(self) + else: + return '{0.author.name} started a call \N{EM DASH} Join the call.'.format(self) + + if self.type is MessageType.premium_guild_subscription: + return '{0.author.name} just boosted the server!'.format(self) + + if self.type is MessageType.premium_guild_tier_1: + return '{0.author.name} just boosted the server! {0.guild} has achieved **Level 1!**'.format(self) + + if self.type is MessageType.premium_guild_tier_2: + return '{0.author.name} just boosted the server! {0.guild} has achieved **Level 2!**'.format(self) + + if self.type is MessageType.premium_guild_tier_3: + return '{0.author.name} just boosted the server! {0.guild} has achieved **Level 3!**'.format(self) + + async def delete(self, *, delay=None): + """|coro| + + Deletes the message. + + Your own messages could be deleted without any proper permissions. However to + delete other people's messages, you need the :attr:`~Permissions.manage_messages` + permission. + + .. versionchanged:: 1.1.0 + Added the new ``delay`` keyword-only parameter. + + Parameters + ----------- + delay: Optional[:class:`float`] + If provided, the number of seconds to wait in the background + before deleting the message. + + Raises + ------ + Forbidden + You do not have proper permissions to delete the message. + HTTPException + Deleting the message failed. 
+ """ + if delay is not None: + async def delete(): + await asyncio.sleep(delay, loop=self._state.loop) + try: + await self._state.http.delete_message(self.channel.id, self.id) + except HTTPException: + pass + + asyncio.ensure_future(delete(), loop=self._state.loop) + else: + await self._state.http.delete_message(self.channel.id, self.id) + + async def edit(self, **fields): + """|coro| + + Edits the message. + + The content must be able to be transformed into a string via ``str(content)``. + + Parameters + ----------- + content: Optional[:class:`str`] + The new content to replace the message with. + Could be ``None`` to remove the content. + embed: Optional[:class:`Embed`] + The new embed to replace the original with. + Could be ``None`` to remove the embed. + delete_after: Optional[:class:`float`] + If provided, the number of seconds to wait in the background + before deleting the message we just edited. If the deletion fails, + then it is silently ignored. + + Raises + ------- + HTTPException + Editing the message failed. + """ + + try: + content = fields['content'] + except KeyError: + pass + else: + if content is not None: + fields['content'] = str(content) + + try: + embed = fields['embed'] + except KeyError: + pass + else: + if embed is not None: + fields['embed'] = embed.to_dict() + + data = await self._state.http.edit_message(self.channel.id, self.id, **fields) + self._update(data) + + try: + delete_after = fields['delete_after'] + except KeyError: + pass + else: + if delete_after is not None: + await self.delete(delay=delete_after) + + async def pin(self): + """|coro| + + Pins the message. + + You must have the :attr:`~Permissions.manage_messages` permission to do + this in a non-private channel context. + + Raises + ------- + Forbidden + You do not have permissions to pin the message. + NotFound + The message or channel was not found or deleted. + HTTPException + Pinning the message failed, probably due to the channel + having more than 50 pinned messages. + """ + + await self._state.http.pin_message(self.channel.id, self.id) + self.pinned = True + + async def unpin(self): + """|coro| + + Unpins the message. + + You must have the :attr:`~Permissions.manage_messages` permission to do + this in a non-private channel context. + + Raises + ------- + Forbidden + You do not have permissions to unpin the message. + NotFound + The message or channel was not found or deleted. + HTTPException + Unpinning the message failed. + """ + + await self._state.http.unpin_message(self.channel.id, self.id) + self.pinned = False + + async def add_reaction(self, emoji): + """|coro| + + Add a reaction to the message. + + The emoji may be a unicode emoji or a custom guild :class:`Emoji`. + + You must have the :attr:`~Permissions.read_message_history` permission + to use this. If nobody else has reacted to the message using this + emoji, the :attr:`~Permissions.add_reactions` permission is required. + + Parameters + ------------ + emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`] + The emoji to react with. + + Raises + -------- + HTTPException + Adding the reaction failed. + Forbidden + You do not have the proper permissions to react to the message. + NotFound + The emoji you specified was not found. + InvalidArgument + The emoji parameter is invalid. 
+ """ + + emoji = self._emoji_reaction(emoji) + await self._state.http.add_reaction(self.channel.id, self.id, emoji) + + async def remove_reaction(self, emoji, member): + """|coro| + + Remove a reaction by the member from the message. + + The emoji may be a unicode emoji or a custom guild :class:`Emoji`. + + If the reaction is not your own (i.e. ``member`` parameter is not you) then + the :attr:`~Permissions.manage_messages` permission is needed. + + The ``member`` parameter must represent a member and meet + the :class:`abc.Snowflake` abc. + + Parameters + ------------ + emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`] + The emoji to remove. + member: :class:`abc.Snowflake` + The member for which to remove the reaction. + + Raises + -------- + HTTPException + Removing the reaction failed. + Forbidden + You do not have the proper permissions to remove the reaction. + NotFound + The member or emoji you specified was not found. + InvalidArgument + The emoji parameter is invalid. + """ + + emoji = self._emoji_reaction(emoji) + + if member.id == self._state.self_id: + await self._state.http.remove_own_reaction(self.channel.id, self.id, emoji) + else: + await self._state.http.remove_reaction(self.channel.id, self.id, emoji, member.id) + + @staticmethod + def _emoji_reaction(emoji): + if isinstance(emoji, Reaction): + emoji = emoji.emoji + + if isinstance(emoji, Emoji): + return '%s:%s' % (emoji.name, emoji.id) + if isinstance(emoji, PartialEmoji): + return emoji._as_reaction() + if isinstance(emoji, str): + # Reactions can be in :name:id format, but not <:name:id>. + # No existing emojis have <> in them, so this should be okay. + return emoji.strip('<>') + + raise InvalidArgument('emoji argument must be str, Emoji, or Reaction not {.__class__.__name__}.'.format(emoji)) + + async def clear_reactions(self): + """|coro| + + Removes all the reactions from the message. + + You need the :attr:`~Permissions.manage_messages` permission to use this. + + Raises + -------- + HTTPException + Removing the reactions failed. + Forbidden + You do not have the proper permissions to remove all the reactions. + """ + await self._state.http.clear_reactions(self.channel.id, self.id) + + async def ack(self): + """|coro| + + Marks this message as read. + + The user must not be a bot user. + + Raises + ------- + HTTPException + Acking failed. + ClientException + You must not be a bot user. + """ + + state = self._state + if state.is_bot: + raise ClientException('Must not be a bot account to ack messages.') + return await state.http.ack_message(self.channel.id, self.id) diff --git a/venv/lib/python3.7/site-packages/discord/mixins.py b/venv/lib/python3.7/site-packages/discord/mixins.py new file mode 100644 index 0000000..3717cf4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/mixins.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class EqualityComparable: + __slots__ = () + + def __eq__(self, other): + return isinstance(other, self.__class__) and other.id == self.id + + def __ne__(self, other): + if isinstance(other, self.__class__): + return other.id != self.id + return True + +class Hashable(EqualityComparable): + __slots__ = () + + def __hash__(self): + return self.id >> 22 diff --git a/venv/lib/python3.7/site-packages/discord/object.py b/venv/lib/python3.7/site-packages/discord/object.py new file mode 100644 index 0000000..1660171 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/object.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils +from .mixins import Hashable + +class Object(Hashable): + """Represents a generic Discord object. + + The purpose of this class is to allow you to create 'miniature' + versions of data classes if you want to pass in just an ID. Most functions + that take in a specific data class with an ID can also take in this class + as a substitute instead. Note that even though this is the case, not all + objects (if any) actually inherit from this class. + + There are also some cases where some websocket events are received + in :issue:`strange order <21>` and when such events happened you would + receive this class rather than the actual data class. These cases are + extremely rare. + + .. container:: operations + + .. describe:: x == y + + Checks if two objects are equal. + + .. describe:: x != y + + Checks if two objects are not equal. + + .. describe:: hash(x) + + Returns the object's hash. + + Attributes + ----------- + id: :class:`str` + The ID of the object. 
+ """ + + def __init__(self, id): + self.id = id + + def __repr__(self): + return '' % self.id + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the snowflake's creation time in UTC.""" + return utils.snowflake_time(self.id) diff --git a/venv/lib/python3.7/site-packages/discord/opus.py b/venv/lib/python3.7/site-packages/discord/opus.py new file mode 100644 index 0000000..9ad6b11 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/opus.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import array +import ctypes +import ctypes.util +import logging +import os.path +import sys + +from .errors import DiscordException + +log = logging.getLogger(__name__) +c_int_ptr = ctypes.POINTER(ctypes.c_int) +c_int16_ptr = ctypes.POINTER(ctypes.c_int16) +c_float_ptr = ctypes.POINTER(ctypes.c_float) + +class EncoderStruct(ctypes.Structure): + pass + +EncoderStructPtr = ctypes.POINTER(EncoderStruct) + +def _err_lt(result, func, args): + if result < 0: + log.info('error has happened in %s', func.__name__) + raise OpusError(result) + return result + +def _err_ne(result, func, args): + ret = args[-1]._obj + if ret.value != 0: + log.info('error has happened in %s', func.__name__) + raise OpusError(ret.value) + return result + +# A list of exported functions. +# The first argument is obviously the name. +# The second one are the types of arguments it takes. +# The third is the result type. +# The fourth is the error handler. +exported_functions = [ + ('opus_strerror', + [ctypes.c_int], ctypes.c_char_p, None), + ('opus_encoder_get_size', + [ctypes.c_int], ctypes.c_int, None), + ('opus_encoder_create', + [ctypes.c_int, ctypes.c_int, ctypes.c_int, c_int_ptr], EncoderStructPtr, _err_ne), + ('opus_encode', + [EncoderStructPtr, c_int16_ptr, ctypes.c_int, ctypes.c_char_p, ctypes.c_int32], ctypes.c_int32, _err_lt), + ('opus_encoder_ctl', + None, ctypes.c_int32, _err_lt), + ('opus_encoder_destroy', + [EncoderStructPtr], None, None), +] + +def libopus_loader(name): + # create the library... + lib = ctypes.cdll.LoadLibrary(name) + + # register the functions... 
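+    # Descriptive note: each exported_functions entry is (name, argtypes, restype,
+    # errcheck). argtypes stays None for opus_encoder_ctl, which is variadic and
+    # therefore has no fixed argument list that ctypes could check.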
+ for item in exported_functions: + func = getattr(lib, item[0]) + + try: + if item[1]: + func.argtypes = item[1] + + func.restype = item[2] + except KeyError: + pass + + try: + if item[3]: + func.errcheck = item[3] + except KeyError: + log.exception("Error assigning check function to %s", func) + + return lib + +try: + if sys.platform == 'win32': + _basedir = os.path.dirname(os.path.abspath(__file__)) + _bitness = 'x64' if sys.maxsize > 2**32 else 'x86' + _filename = os.path.join(_basedir, 'bin', 'libopus-0.{}.dll'.format(_bitness)) + _lib = libopus_loader(_filename) + else: + _lib = libopus_loader(ctypes.util.find_library('opus')) +except Exception: + _lib = None + +def load_opus(name): + """Loads the libopus shared library for use with voice. + + If this function is not called then the library uses the function + :func:`ctypes.util.find_library` and then loads that one + if available. + + Not loading a library leads to voice not working. + + This function propagates the exceptions thrown. + + .. warning:: + + The bitness of the library must match the bitness of your python + interpreter. If the library is 64-bit then your python interpreter + must be 64-bit as well. Usually if there's a mismatch in bitness then + the load will throw an exception. + + .. note:: + + On Windows, this function should not need to be called as the binaries + are automatically loaded. + + .. note:: + + On Windows, the .dll extension is not necessary. However, on Linux + the full extension is required to load the library, e.g. ``libopus.so.1``. + On Linux however, :func:`ctypes.util.find_library` will usually find the library automatically + without you having to call this. + + Parameters + ---------- + name: :class:`str` + The filename of the shared library. + """ + global _lib + _lib = libopus_loader(name) + +def is_loaded(): + """Function to check if opus lib is successfully loaded either + via the :func:`ctypes.util.find_library` call of :func:`load_opus`. + + This must return ``True`` for voice to work. + + Returns + ------- + :class:`bool` + Indicates if the opus library has been loaded. + """ + global _lib + return _lib is not None + +class OpusError(DiscordException): + """An exception that is thrown for libopus related errors. + + Attributes + ---------- + code: :class:`int` + The error code returned. + """ + + def __init__(self, code): + self.code = code + msg = _lib.opus_strerror(self.code).decode('utf-8') + log.info('"%s" has happened', msg) + super().__init__(msg) + +class OpusNotLoaded(DiscordException): + """An exception that is thrown for when libopus is not loaded.""" + pass + + +# Some constants... 
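+# These mirror libopus's opus_defines.h: the APPLICATION_* values are the encoder
+# application modes passed to opus_encoder_create, the CTL_SET_* values are
+# opus_encoder_ctl request codes, and band_ctl / signal_ctl below map friendly
+# names onto the OPUS_BANDWIDTH_* and OPUS_SIGNAL_* enum values.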
+OK = 0 +APPLICATION_AUDIO = 2049 +APPLICATION_VOIP = 2048 +APPLICATION_LOWDELAY = 2051 +CTL_SET_BITRATE = 4002 +CTL_SET_BANDWIDTH = 4008 +CTL_SET_FEC = 4012 +CTL_SET_PLP = 4014 +CTL_SET_SIGNAL = 4024 + +band_ctl = { + 'narrow': 1101, + 'medium': 1102, + 'wide': 1103, + 'superwide': 1104, + 'full': 1105, +} + +signal_ctl = { + 'auto': -1000, + 'voice': 3001, + 'music': 3002, +} + +class Encoder: + SAMPLING_RATE = 48000 + CHANNELS = 2 + FRAME_LENGTH = 20 + SAMPLE_SIZE = 4 # (bit_rate / 8) * CHANNELS (bit_rate == 16) + SAMPLES_PER_FRAME = int(SAMPLING_RATE / 1000 * FRAME_LENGTH) + + FRAME_SIZE = SAMPLES_PER_FRAME * SAMPLE_SIZE + + def __init__(self, application=APPLICATION_AUDIO): + self.application = application + + if not is_loaded(): + raise OpusNotLoaded() + + self._state = self._create_state() + self.set_bitrate(128) + self.set_fec(True) + self.set_expected_packet_loss_percent(0.15) + self.set_bandwidth('full') + self.set_signal_type('auto') + + def __del__(self): + if hasattr(self, '_state'): + _lib.opus_encoder_destroy(self._state) + self._state = None + + def _create_state(self): + ret = ctypes.c_int() + return _lib.opus_encoder_create(self.SAMPLING_RATE, self.CHANNELS, self.application, ctypes.byref(ret)) + + def set_bitrate(self, kbps): + kbps = min(512, max(16, int(kbps))) + + _lib.opus_encoder_ctl(self._state, CTL_SET_BITRATE, kbps * 1024) + return kbps + + def set_bandwidth(self, req): + if req not in band_ctl: + raise KeyError('%r is not a valid bandwidth setting. Try one of: %s' % (req, ','.join(band_ctl))) + + k = band_ctl[req] + _lib.opus_encoder_ctl(self._state, CTL_SET_BANDWIDTH, k) + + def set_signal_type(self, req): + if req not in signal_ctl: + raise KeyError('%r is not a valid signal setting. Try one of: %s' % (req, ','.join(signal_ctl))) + + k = signal_ctl[req] + _lib.opus_encoder_ctl(self._state, CTL_SET_SIGNAL, k) + + def set_fec(self, enabled=True): + _lib.opus_encoder_ctl(self._state, CTL_SET_FEC, 1 if enabled else 0) + + def set_expected_packet_loss_percent(self, percentage): + _lib.opus_encoder_ctl(self._state, CTL_SET_PLP, min(100, max(0, int(percentage * 100)))) + + def encode(self, pcm, frame_size): + max_data_bytes = len(pcm) + pcm = ctypes.cast(pcm, c_int16_ptr) + data = (ctypes.c_char * max_data_bytes)() + + ret = _lib.opus_encode(self._state, pcm, frame_size, data, max_data_bytes) + + return array.array('b', data[:ret]).tobytes() diff --git a/venv/lib/python3.7/site-packages/discord/permissions.py b/venv/lib/python3.7/site-packages/discord/permissions.py new file mode 100644 index 0000000..45db34f --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/permissions.py @@ -0,0 +1,637 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class Permissions: + """Wraps up the Discord permission value. + + The properties provided are two way. You can set and retrieve individual + bits using the properties as if they were regular bools. This allows + you to edit permissions. + + .. container:: operations + + .. describe:: x == y + + Checks if two permissions are equal. + .. describe:: x != y + + Checks if two permissions are not equal. + .. describe:: x <= y + + Checks if a permission is a subset of another permission. + .. describe:: x >= y + + Checks if a permission is a superset of another permission. + .. describe:: x < y + + Checks if a permission is a strict subset of another permission. + .. describe:: x > y + + Checks if a permission is a strict superset of another permission. + .. describe:: hash(x) + + Return the permission's hash. + .. describe:: iter(x) + + Returns an iterator of ``(perm, value)`` pairs. This allows it + to be, for example, constructed as a dict or a list of pairs. + + Attributes + ----------- + value + The raw value. This value is a bit array field of a 53-bit integer + representing the currently available permissions. You should query + permissions via the properties rather than using this raw value. + """ + + __slots__ = ('value',) + def __init__(self, permissions=0): + if not isinstance(permissions, int): + raise TypeError('Expected int parameter, received %s instead.' % permissions.__class__.__name__) + + self.value = permissions + + def __eq__(self, other): + return isinstance(other, Permissions) and self.value == other.value + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.value) + + def __repr__(self): + return '' % self.value + + def _perm_iterator(self): + for attr in dir(self): + # check if it's a property, because if so it's a permission + is_property = isinstance(getattr(self.__class__, attr), property) + if is_property: + yield (attr, getattr(self, attr)) + + def __iter__(self): + return self._perm_iterator() + + def is_subset(self, other): + """Returns ``True`` if self has the same or fewer permissions as other.""" + if isinstance(other, Permissions): + return (self.value & other.value) == self.value + else: + raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__.__name__)) + + def is_superset(self, other): + """Returns ``True`` if self has the same or more permissions as other.""" + if isinstance(other, Permissions): + return (self.value | other.value) == self.value + else: + raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__.__name__)) + + def is_strict_subset(self, other): + """Returns ``True`` if the permissions on other are a strict subset of those on self.""" + return self.is_subset(other) and self != other + + def is_strict_superset(self, other): + """Returns ``True`` if the permissions on other are a strict superset of those on self.""" + return self.is_superset(other) and self != other + + __le__ = is_subset + __ge__ = is_superset + __lt__ = is_strict_subset + __gt__ = is_strict_superset + + @classmethod + def none(cls): + """A factory method that creates a :class:`Permissions` with all + permissions set to ``False``.""" + return cls(0) + + @classmethod + def all(cls): + """A 
factory method that creates a :class:`Permissions` with all + permissions set to True.""" + return cls(0b01111111111101111111111111111111) + + @classmethod + def all_channel(cls): + """A :class:`Permissions` with all channel-specific permissions set to + ``True`` and the guild-specific ones set to ``False``. The guild-specific + permissions are currently: + + - manage_guild + - kick_members + - ban_members + - administrator + - change_nickname + - manage_nicknames + """ + return cls(0b00110011111101111111110001010001) + + @classmethod + def general(cls): + """A factory method that creates a :class:`Permissions` with all + "General" permissions from the official Discord UI set to ``True``.""" + return cls(0b01111100000000000000000010111111) + + @classmethod + def text(cls): + """A factory method that creates a :class:`Permissions` with all + "Text" permissions from the official Discord UI set to ``True``.""" + return cls(0b00000000000001111111110001000000) + + @classmethod + def voice(cls): + """A factory method that creates a :class:`Permissions` with all + "Voice" permissions from the official Discord UI set to ``True``.""" + return cls(0b00000011111100000000001100000000) + + + def update(self, **kwargs): + r"""Bulk updates this permission object. + + Allows you to set multiple attributes by using keyword + arguments. The names must be equivalent to the properties + listed. Extraneous key/value pairs will be silently ignored. + + Parameters + ------------ + \*\*kwargs + A list of key/value pairs to bulk update permissions with. + """ + for key, value in kwargs.items(): + try: + is_property = isinstance(getattr(self.__class__, key), property) + except AttributeError: + continue + + if is_property: + setattr(self, key, value) + + def _bit(self, index): + return bool((self.value >> index) & 1) + + def _set(self, index, value): + if value is True: + self.value |= (1 << index) + elif value is False: + self.value &= ~(1 << index) + else: + raise TypeError('Value to set for Permissions must be a bool.') + + def handle_overwrite(self, allow, deny): + # Basically this is what's happening here. + # We have an original bit array, e.g. 1010 + # Then we have another bit array that is 'denied', e.g. 1111 + # And then we have the last one which is 'allowed', e.g. 0101 + # We want original OP denied to end up resulting in + # whatever is in denied to be set to 0. + # So 1010 OP 1111 -> 0000 + # Then we take this value and look at the allowed values. + # And whatever is allowed is set to 1. + # So 0000 OP2 0101 -> 0101 + # The OP is base & ~denied. + # The OP2 is base | allowed. + self.value = (self.value & ~deny) | allow + + @property + def create_instant_invite(self): + """:class:`bool`: Returns ``True`` if the user can create instant invites.""" + return self._bit(0) + + @create_instant_invite.setter + def create_instant_invite(self, value): + self._set(0, value) + + @property + def kick_members(self): + """:class:`bool`: Returns ``True`` if the user can kick users from the guild.""" + return self._bit(1) + + @kick_members.setter + def kick_members(self, value): + self._set(1, value) + + @property + def ban_members(self): + """:class:`bool`: Returns ``True`` if a user can ban users from the guild.""" + return self._bit(2) + + @ban_members.setter + def ban_members(self, value): + self._set(2, value) + + @property + def administrator(self): + """:class:`bool`: Returns ``True`` if a user is an administrator. This role overrides all other permissions. + + This also bypasses all channel-specific overrides. 
+ """ + return self._bit(3) + + @administrator.setter + def administrator(self, value): + self._set(3, value) + + @property + def manage_channels(self): + """:class:`bool`: Returns ``True`` if a user can edit, delete, or create channels in the guild. + + This also corresponds to the "Manage Channel" channel-specific override.""" + return self._bit(4) + + @manage_channels.setter + def manage_channels(self, value): + self._set(4, value) + + @property + def manage_guild(self): + """:class:`bool`: Returns ``True`` if a user can edit guild properties.""" + return self._bit(5) + + @manage_guild.setter + def manage_guild(self, value): + self._set(5, value) + + @property + def add_reactions(self): + """:class:`bool`: Returns ``True`` if a user can add reactions to messages.""" + return self._bit(6) + + @add_reactions.setter + def add_reactions(self, value): + self._set(6, value) + + @property + def view_audit_log(self): + """:class:`bool`: Returns ``True`` if a user can view the guild's audit log.""" + return self._bit(7) + + @view_audit_log.setter + def view_audit_log(self, value): + self._set(7, value) + + @property + def priority_speaker(self): + """:class:`bool`: Returns ``True`` if a user can be more easily heard while talking.""" + return self._bit(8) + + @priority_speaker.setter + def priority_speaker(self, value): + self._set(8, value) + + @property + def stream(self): + """:class:`bool`: Returns ``True`` if a user can stream in a voice channel.""" + return self._bit(9) + + @stream.setter + def stream(self, value): + self._set(9, value) + + @property + def read_messages(self): + """:class:`bool`: Returns ``True`` if a user can read messages from all or specific text channels.""" + return self._bit(10) + + @read_messages.setter + def read_messages(self, value): + self._set(10, value) + + @property + def send_messages(self): + """:class:`bool`: Returns ``True`` if a user can send messages from all or specific text channels.""" + return self._bit(11) + + @send_messages.setter + def send_messages(self, value): + self._set(11, value) + + @property + def send_tts_messages(self): + """:class:`bool`: Returns ``True`` if a user can send TTS messages from all or specific text channels.""" + return self._bit(12) + + @send_tts_messages.setter + def send_tts_messages(self, value): + self._set(12, value) + + @property + def manage_messages(self): + """:class:`bool`: Returns ``True`` if a user can delete or pin messages in a text channel. + + .. note:: + + Note that there are currently no ways to edit other people's messages. 
+ """ + return self._bit(13) + + @manage_messages.setter + def manage_messages(self, value): + self._set(13, value) + + @property + def embed_links(self): + """:class:`bool`: Returns ``True`` if a user's messages will automatically be embedded by Discord.""" + return self._bit(14) + + @embed_links.setter + def embed_links(self, value): + self._set(14, value) + + @property + def attach_files(self): + """:class:`bool`: Returns ``True`` if a user can send files in their messages.""" + return self._bit(15) + + @attach_files.setter + def attach_files(self, value): + self._set(15, value) + + @property + def read_message_history(self): + """:class:`bool`: Returns ``True`` if a user can read a text channel's previous messages.""" + return self._bit(16) + + @read_message_history.setter + def read_message_history(self, value): + self._set(16, value) + + @property + def mention_everyone(self): + """:class:`bool`: Returns ``True`` if a user's @everyone or @here will mention everyone in the text channel.""" + return self._bit(17) + + @mention_everyone.setter + def mention_everyone(self, value): + self._set(17, value) + + @property + def external_emojis(self): + """:class:`bool`: Returns ``True`` if a user can use emojis from other guilds.""" + return self._bit(18) + + @external_emojis.setter + def external_emojis(self, value): + self._set(18, value) + + # 1 unused + + @property + def connect(self): + """:class:`bool`: Returns ``True`` if a user can connect to a voice channel.""" + return self._bit(20) + + @connect.setter + def connect(self, value): + self._set(20, value) + + @property + def speak(self): + """:class:`bool`: Returns ``True`` if a user can speak in a voice channel.""" + return self._bit(21) + + @speak.setter + def speak(self, value): + self._set(21, value) + + @property + def mute_members(self): + """:class:`bool`: Returns ``True`` if a user can mute other users.""" + return self._bit(22) + + @mute_members.setter + def mute_members(self, value): + self._set(22, value) + + @property + def deafen_members(self): + """:class:`bool`: Returns ``True`` if a user can deafen other users.""" + return self._bit(23) + + @deafen_members.setter + def deafen_members(self, value): + self._set(23, value) + + @property + def move_members(self): + """:class:`bool`: Returns ``True`` if a user can move users between other voice channels.""" + return self._bit(24) + + @move_members.setter + def move_members(self, value): + self._set(24, value) + + @property + def use_voice_activation(self): + """:class:`bool`: Returns ``True`` if a user can use voice activation in voice channels.""" + return self._bit(25) + + @use_voice_activation.setter + def use_voice_activation(self, value): + self._set(25, value) + + @property + def change_nickname(self): + """:class:`bool`: Returns ``True`` if a user can change their nickname in the guild.""" + return self._bit(26) + + @change_nickname.setter + def change_nickname(self, value): + self._set(26, value) + + @property + def manage_nicknames(self): + """:class:`bool`: Returns ``True`` if a user can change other user's nickname in the guild.""" + return self._bit(27) + + @manage_nicknames.setter + def manage_nicknames(self, value): + self._set(27, value) + + @property + def manage_roles(self): + """:class:`bool`: Returns ``True`` if a user can create or edit roles less than their role's position. + + This also corresponds to the "Manage Permissions" channel-specific override. 
+ """ + return self._bit(28) + + @manage_roles.setter + def manage_roles(self, value): + self._set(28, value) + + @property + def manage_webhooks(self): + """:class:`bool`: Returns ``True`` if a user can create, edit, or delete webhooks.""" + return self._bit(29) + + @manage_webhooks.setter + def manage_webhooks(self, value): + self._set(29, value) + + @property + def manage_emojis(self): + """:class:`bool`: Returns ``True`` if a user can create, edit, or delete emojis.""" + return self._bit(30) + + @manage_emojis.setter + def manage_emojis(self, value): + self._set(30, value) + + # 1 unused + + # after these 32 bits, there's 21 more unused ones technically + +def augment_from_permissions(cls): + cls.VALID_NAMES = {name for name in dir(Permissions) if isinstance(getattr(Permissions, name), property)} + + # make descriptors for all the valid names + for name in cls.VALID_NAMES: + # god bless Python + def getter(self, x=name): + return self._values.get(x) + def setter(self, value, x=name): + self._set(x, value) + + prop = property(getter, setter) + setattr(cls, name, prop) + + return cls + +@augment_from_permissions +class PermissionOverwrite: + r"""A type that is used to represent a channel specific permission. + + Unlike a regular :class:`Permissions`\, the default value of a + permission is equivalent to ``None`` and not ``False``. Setting + a value to ``False`` is **explicitly** denying that permission, + while setting a value to ``True`` is **explicitly** allowing + that permission. + + The values supported by this are the same as :class:`Permissions` + with the added possibility of it being set to ``None``. + + Supported operations: + + +-----------+------------------------------------------+ + | Operation | Description | + +===========+==========================================+ + | x == y | Checks if two overwrites are equal. | + +-----------+------------------------------------------+ + | x != y | Checks if two overwrites are not equal. | + +-----------+------------------------------------------+ + | iter(x) | Returns an iterator of (perm, value) | + | | pairs. This allows this class to be used | + | | as an iterable in e.g. set/list/dict | + | | constructions. | + +-----------+------------------------------------------+ + + Parameters + ----------- + \*\*kwargs + Set the value of permissions by their name. + """ + + __slots__ = ('_values',) + + def __init__(self, **kwargs): + self._values = {} + + for key, value in kwargs.items(): + if key not in self.VALID_NAMES: + raise ValueError('no permission called {0}.'.format(key)) + + setattr(self, key, value) + + def __eq__(self, other): + return self._values == other._values + + def _set(self, key, value): + if value not in (True, None, False): + raise TypeError('Expected bool or NoneType, received {0.__class__.__name__}'.format(value)) + + self._values[key] = value + + def pair(self): + """Returns the (allow, deny) pair from this overwrite. + + The value of these pairs is :class:`Permissions`. 
+ """ + + allow = Permissions.none() + deny = Permissions.none() + + for key, value in self._values.items(): + if value is True: + setattr(allow, key, True) + elif value is False: + setattr(deny, key, True) + + return allow, deny + + @classmethod + def from_pair(cls, allow, deny): + """Creates an overwrite from an allow/deny pair of :class:`Permissions`.""" + ret = cls() + for key, value in allow: + if value is True: + setattr(ret, key, True) + + for key, value in deny: + if value is True: + setattr(ret, key, False) + + return ret + + def is_empty(self): + """Checks if the permission overwrite is currently empty. + + An empty permission overwrite is one that has no overwrites set + to ``True`` or ``False``. + """ + return all(x is None for x in self._values.values()) + + def update(self, **kwargs): + r"""Bulk updates this permission overwrite object. + + Allows you to set multiple attributes by using keyword + arguments. The names must be equivalent to the properties + listed. Extraneous key/value pairs will be silently ignored. + + Parameters + ------------ + \*\*kwargs + A list of key/value pairs to bulk update with. + """ + for key, value in kwargs.items(): + if key not in self.VALID_NAMES: + continue + + setattr(self, key, value) + + def __iter__(self): + for key in self.VALID_NAMES: + yield key, self._values.get(key) diff --git a/venv/lib/python3.7/site-packages/discord/player.py b/venv/lib/python3.7/site-packages/discord/player.py new file mode 100644 index 0000000..9d9311e --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/player.py @@ -0,0 +1,346 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import threading +import subprocess +import audioop +import asyncio +import logging +import shlex +import time + +from .errors import ClientException +from .opus import Encoder as OpusEncoder + +log = logging.getLogger(__name__) + +__all__ = ( + 'AudioSource', + 'PCMAudio', + 'FFmpegPCMAudio', + 'PCMVolumeTransformer', +) + +class AudioSource: + """Represents an audio stream. + + The audio stream can be Opus encoded or not, however if the audio stream + is not Opus encoded then the audio format must be 16-bit 48KHz stereo PCM. + + .. warning:: + + The audio source reads are done in a separate thread. + """ + + def read(self): + """Reads 20ms worth of audio. + + Subclasses must implement this. 
+ + If the audio is complete, then returning an empty + :term:`py:bytes-like object` to signal this is the way to do so. + + If :meth:`is_opus` method returns ``True``, then it must return + 20ms worth of Opus encoded audio. Otherwise, it must be 20ms + worth of 16-bit 48KHz stereo PCM, which is about 3,840 bytes + per frame (20ms worth of audio). + + Returns + -------- + :class:`bytes` + A bytes like object that represents the PCM or Opus data. + """ + raise NotImplementedError + + def is_opus(self): + """Checks if the audio source is already encoded in Opus.""" + return False + + def cleanup(self): + """Called when clean-up is needed to be done. + + Useful for clearing buffer data or processes after + it is done playing audio. + """ + pass + + def __del__(self): + self.cleanup() + +class PCMAudio(AudioSource): + """Represents raw 16-bit 48KHz stereo PCM audio source. + + Attributes + ----------- + stream: :term:`py:file object` + A file-like object that reads byte data representing raw PCM. + """ + def __init__(self, stream): + self.stream = stream + + def read(self): + ret = self.stream.read(OpusEncoder.FRAME_SIZE) + if len(ret) != OpusEncoder.FRAME_SIZE: + return b'' + return ret + +class FFmpegPCMAudio(AudioSource): + """An audio source from FFmpeg (or AVConv). + + This launches a sub-process to a specific input file given. + + .. warning:: + + You must have the ffmpeg or avconv executable in your path environment + variable in order for this to work. + + Parameters + ------------ + source: Union[:class:`str`, :class:`io.BufferedIOBase`] + The input that ffmpeg will take and convert to PCM bytes. + If ``pipe`` is True then this is a file-like object that is + passed to the stdin of ffmpeg. + executable: :class:`str` + The executable name (and path) to use. Defaults to ``ffmpeg``. + pipe: :class:`bool` + If ``True``, denotes that ``source`` parameter will be passed + to the stdin of ffmpeg. Defaults to ``False``. + stderr: Optional[:term:`py:file object`] + A file-like object to pass to the Popen constructor. + Could also be an instance of ``subprocess.PIPE``. + options: Optional[:class:`str`] + Extra command line arguments to pass to ffmpeg after the ``-i`` flag. + before_options: Optional[:class:`str`] + Extra command line arguments to pass to ffmpeg before the ``-i`` flag. + + Raises + -------- + ClientException + The subprocess failed to be created. 
+ """ + + def __init__(self, source, *, executable='ffmpeg', pipe=False, stderr=None, before_options=None, options=None): + stdin = None if not pipe else source + + args = [executable] + + if isinstance(before_options, str): + args.extend(shlex.split(before_options)) + + args.append('-i') + args.append('-' if pipe else source) + args.extend(('-f', 's16le', '-ar', '48000', '-ac', '2', '-loglevel', 'warning')) + + if isinstance(options, str): + args.extend(shlex.split(options)) + + args.append('pipe:1') + + self._process = None + try: + self._process = subprocess.Popen(args, stdin=stdin, stdout=subprocess.PIPE, stderr=stderr) + self._stdout = self._process.stdout + except FileNotFoundError: + raise ClientException(executable + ' was not found.') from None + except subprocess.SubprocessError as exc: + raise ClientException('Popen failed: {0.__class__.__name__}: {0}'.format(exc)) from exc + + def read(self): + ret = self._stdout.read(OpusEncoder.FRAME_SIZE) + if len(ret) != OpusEncoder.FRAME_SIZE: + return b'' + return ret + + def cleanup(self): + proc = self._process + if proc is None: + return + + log.info('Preparing to terminate ffmpeg process %s.', proc.pid) + proc.kill() + if proc.poll() is None: + log.info('ffmpeg process %s has not terminated. Waiting to terminate...', proc.pid) + proc.communicate() + log.info('ffmpeg process %s should have terminated with a return code of %s.', proc.pid, proc.returncode) + else: + log.info('ffmpeg process %s successfully terminated with return code of %s.', proc.pid, proc.returncode) + + self._process = None + +class PCMVolumeTransformer(AudioSource): + """Transforms a previous :class:`AudioSource` to have volume controls. + + This does not work on audio sources that have :meth:`AudioSource.is_opus` + set to ``True``. + + Parameters + ------------ + original: :class:`AudioSource` + The original AudioSource to transform. + volume: :class:`float` + The initial volume to set it to. + See :attr:`volume` for more info. + + Raises + ------- + TypeError + Not an audio source. + ClientException + The audio source is opus encoded. + """ + + def __init__(self, original, volume=1.0): + if not isinstance(original, AudioSource): + raise TypeError('expected AudioSource not {0.__class__.__name__}.'.format(original)) + + if original.is_opus(): + raise ClientException('AudioSource must not be Opus encoded.') + + self.original = original + self.volume = volume + + @property + def volume(self): + """Retrieves or sets the volume as a floating point percentage (e.g. 
1.0 for 100%).""" + return self._volume + + @volume.setter + def volume(self, value): + self._volume = max(value, 0.0) + + def cleanup(self): + self.original.cleanup() + + def read(self): + ret = self.original.read() + return audioop.mul(ret, 2, min(self._volume, 2.0)) + +class AudioPlayer(threading.Thread): + DELAY = OpusEncoder.FRAME_LENGTH / 1000.0 + + def __init__(self, source, client, *, after=None): + threading.Thread.__init__(self) + self.daemon = True + self.source = source + self.client = client + self.after = after + + self._end = threading.Event() + self._resumed = threading.Event() + self._resumed.set() # we are not paused + self._current_error = None + self._connected = client._connected + self._lock = threading.Lock() + + if after is not None and not callable(after): + raise TypeError('Expected a callable for the "after" parameter.') + + def _do_run(self): + self.loops = 0 + self._start = time.time() + + # getattr lookup speed ups + play_audio = self.client.send_audio_packet + self._speak(True) + + while not self._end.is_set(): + # are we paused? + if not self._resumed.is_set(): + # wait until we aren't + self._resumed.wait() + continue + + # are we disconnected from voice? + if not self._connected.is_set(): + # wait until we are connected + self._connected.wait() + # reset our internal data + self.loops = 0 + self._start = time.time() + + self.loops += 1 + data = self.source.read() + + if not data: + self.stop() + break + + play_audio(data, encode=not self.source.is_opus()) + next_time = self._start + self.DELAY * self.loops + delay = max(0, self.DELAY + (next_time - time.time())) + time.sleep(delay) + + def run(self): + try: + self._do_run() + except Exception as exc: + self._current_error = exc + self.stop() + finally: + self.source.cleanup() + self._call_after() + + def _call_after(self): + if self.after is not None: + try: + self.after(self._current_error) + except Exception: + log.exception('Calling the after function failed.') + + def stop(self): + self._end.set() + self._resumed.set() + self._speak(False) + + def pause(self, *, update_speaking=True): + self._resumed.clear() + if update_speaking: + self._speak(False) + + def resume(self, *, update_speaking=True): + self.loops = 0 + self._start = time.time() + self._resumed.set() + if update_speaking: + self._speak(True) + + def is_playing(self): + return self._resumed.is_set() and not self._end.is_set() + + def is_paused(self): + return not self._end.is_set() and not self._resumed.is_set() + + def _set_source(self, source): + with self._lock: + self.pause(update_speaking=False) + self.source = source + self.resume(update_speaking=False) + + def _speak(self, speaking): + try: + asyncio.run_coroutine_threadsafe(self.client.ws.speak(speaking), self.client.loop) + except Exception as e: + log.info("Speaking call in player failed: %s", e) diff --git a/venv/lib/python3.7/site-packages/discord/raw_models.py b/venv/lib/python3.7/site-packages/discord/raw_models.py new file mode 100644 index 0000000..19ec68b --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/raw_models.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to 
permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class _RawReprMixin: + def __repr__(self): + value = ' '.join('%s=%r' % (attr, getattr(self, attr)) for attr in self.__slots__) + return '<%s %s>' % (self.__class__.__name__, value) + +class RawMessageDeleteEvent(_RawReprMixin): + """Represents the event payload for a :func:`on_raw_message_delete` event. + + Attributes + ------------ + channel_id: :class:`int` + The channel ID where the deletion took place. + guild_id: Optional[:class:`int`] + The guild ID where the deletion took place, if applicable. + message_id: :class:`int` + The message ID that got deleted. + cached_message: Optional[:class:`Message`] + The cached message, if found in the internal message cache. + """ + + __slots__ = ('message_id', 'channel_id', 'guild_id', 'cached_message') + + def __init__(self, data): + self.message_id = int(data['id']) + self.channel_id = int(data['channel_id']) + self.cached_message = None + try: + self.guild_id = int(data['guild_id']) + except KeyError: + self.guild_id = None + +class RawBulkMessageDeleteEvent(_RawReprMixin): + """Represents the event payload for a :func:`on_raw_bulk_message_delete` event. + + Attributes + ----------- + message_ids: Set[:class:`int`] + A :class:`set` of the message IDs that were deleted. + channel_id: :class:`int` + The channel ID where the message got deleted. + guild_id: Optional[:class:`int`] + The guild ID where the message got deleted, if applicable. + cached_messages: List[:class:`Message`] + The cached messages, if found in the internal message cache. + """ + + __slots__ = ('message_ids', 'channel_id', 'guild_id', 'cached_messages') + + def __init__(self, data): + self.message_ids = {int(x) for x in data.get('ids', [])} + self.channel_id = int(data['channel_id']) + self.cached_messages = [] + + try: + self.guild_id = int(data['guild_id']) + except KeyError: + self.guild_id = None + +class RawMessageUpdateEvent(_RawReprMixin): + """Represents the payload for a :func:`on_raw_message_edit` event. + + Attributes + ----------- + message_id: :class:`int` + The message ID that got updated. + data: :class:`dict` + The raw data given by the `gateway `_ + cached_message: Optional[:class:`Message`] + The cached message, if found in the internal message cache. + """ + + __slots__ = ('message_id', 'data', 'cached_message') + + def __init__(self, data): + self.message_id = int(data['id']) + self.data = data + self.cached_message = None + +class RawReactionActionEvent(_RawReprMixin): + """Represents the payload for a :func:`on_raw_reaction_add` or + :func:`on_raw_reaction_remove` event. + + Attributes + ----------- + message_id: :class:`int` + The message ID that got or lost a reaction. + user_id: :class:`int` + The user ID who added the reaction or whose reaction was removed. + channel_id: :class:`int` + The channel ID where the reaction got added or removed. 
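A minimal, hedged sketch of how a bot consumes the raw payloads defined above: the raw events fire even when the deleted message was never cached, so the handler falls back to the IDs whenever `cached_message` is `None`. The `client` instance and the printed output are illustrative assumptions, not part of the patch.

import discord

client = discord.Client()

@client.event
async def on_raw_message_delete(payload: discord.RawMessageDeleteEvent):
    # cached_message is only set when the message was still in the
    # internal message cache (the max_messages deque in ConnectionState).
    if payload.cached_message is not None:
        print('deleted:', payload.cached_message.content)
    else:
        print('deleted message', payload.message_id,
              'in channel', payload.channel_id)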
+ guild_id: Optional[:class:`int`] + The guild ID where the reaction got added or removed, if applicable. + emoji: :class:`PartialEmoji` + The custom or unicode emoji being used. + """ + + __slots__ = ('message_id', 'user_id', 'channel_id', 'guild_id', 'emoji') + + def __init__(self, data, emoji): + self.message_id = int(data['message_id']) + self.channel_id = int(data['channel_id']) + self.user_id = int(data['user_id']) + self.emoji = emoji + + try: + self.guild_id = int(data['guild_id']) + except KeyError: + self.guild_id = None + +class RawReactionClearEvent(_RawReprMixin): + """Represents the payload for a :func:`on_raw_reaction_clear` event. + + Attributes + ----------- + message_id: :class:`int` + The message ID that got its reactions cleared. + channel_id: :class:`int` + The channel ID where the reactions got cleared. + guild_id: Optional[:class:`int`] + The guild ID where the reactions got cleared. + """ + + __slots__ = ('message_id', 'channel_id', 'guild_id') + + def __init__(self, data): + self.message_id = int(data['message_id']) + self.channel_id = int(data['channel_id']) + + try: + self.guild_id = int(data['guild_id']) + except KeyError: + self.guild_id = None diff --git a/venv/lib/python3.7/site-packages/discord/reaction.py b/venv/lib/python3.7/site-packages/discord/reaction.py new file mode 100644 index 0000000..276fd84 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/reaction.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .iterators import ReactionIterator + +class Reaction: + """Represents a reaction to a message. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + .. container:: operations + + .. describe:: x == y + + Checks if two reactions are equal. This works by checking if the emoji + is the same. So two messages with the same reaction will be considered + "equal". + + .. describe:: x != y + + Checks if two reactions are not equal. + + .. describe:: hash(x) + + Returns the reaction's hash. + + .. describe:: str(x) + + Returns the string form of the reaction's emoji. + + Attributes + ----------- + emoji: Union[:class:`Emoji`, :class:`str`] + The reaction emoji. May be a custom emoji, or a unicode emoji. + count: :class:`int` + Number of times this reaction was made + me: :class:`bool` + If the user sent this reaction. + message: :class:`Message` + Message this reaction is for. 
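Because `Reaction` equality and `str()` are keyed on the emoji (as the docstring above explains), a reaction can be looked up on a fetched message by its emoji string. A hedged sketch; `channel` and the message ID are placeholders and the call must run inside a coroutine.

import discord

async def already_reacted(channel: discord.TextChannel) -> bool:
    message = await channel.fetch_message(123456789012345678)  # placeholder ID
    # utils.get compares reaction.emoji == '👍'; str-based equality allows this
    thumbs = discord.utils.get(message.reactions, emoji='\N{THUMBS UP SIGN}')
    return thumbs is not None and thumbs.me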
+ """ + __slots__ = ('message', 'count', 'emoji', 'me') + + def __init__(self, *, message, data, emoji=None): + self.message = message + self.emoji = emoji or message._state.get_reaction_emoji(data['emoji']) + self.count = data.get('count', 1) + self.me = data.get('me') + + @property + def custom_emoji(self): + """:class:`bool`: If this is a custom emoji.""" + return not isinstance(self.emoji, str) + + def __eq__(self, other): + return isinstance(other, self.__class__) and other.emoji == self.emoji + + def __ne__(self, other): + if isinstance(other, self.__class__): + return other.emoji != self.emoji + return True + + def __hash__(self): + return hash(self.emoji) + + def __str__(self): + return str(self.emoji) + + def __repr__(self): + return ''.format(self) + + async def remove(self, user): + """|coro| + + Remove the reaction by the provided :class:`User` from the message. + + If the reaction is not your own (i.e. ``user`` parameter is not you) then + the :attr:`~Permissions.manage_messages` permission is needed. + + The ``user`` parameter must represent a user or member and meet + the :class:`abc.Snowflake` abc. + + Parameters + ----------- + user: :class:`abc.Snowflake` + The user or member from which to remove the reaction. + + Raises + ------- + HTTPException + Removing the reaction failed. + Forbidden + You do not have the proper permissions to remove the reaction. + NotFound + The user you specified, or the reaction's message was not found. + """ + + await self.message.remove_reaction(self.emoji, user) + + def users(self, limit=None, after=None): + """Returns an :class:`AsyncIterator` representing the users that have reacted to the message. + + The ``after`` parameter must represent a member + and meet the :class:`abc.Snowflake` abc. + + Examples + --------- + + Usage :: + + # I do not actually recommend doing this. + async for user in reaction.users(): + await channel.send('{0} has reacted with {1.emoji}!'.format(user, reaction)) + + Flattening into a list: :: + + users = await reaction.users().flatten() + # users is now a list of User... + winner = random.choice(users) + await channel.send('{} has won the raffle.'.format(winner)) + + Parameters + ------------ + limit: :class:`int` + The maximum number of results to return. + If not provided, returns all the users who + reacted to the message. + after: :class:`abc.Snowflake` + For pagination, reactions are sorted by member. + + Raises + -------- + HTTPException + Getting the users for the reaction failed. + + Yields + -------- + Union[:class:`User`, :class:`Member`] + The member (if retrievable) or the user that has reacted + to this message. The case where it can be a :class:`Member` is + in a guild message context. Sometimes it can be a :class:`User` + if the member has left the guild. 
+ """ + + if self.custom_emoji: + emoji = '{0.name}:{0.id}'.format(self.emoji) + else: + emoji = self.emoji + + if limit is None: + limit = self.count + + return ReactionIterator(self.message, emoji, limit, after) diff --git a/venv/lib/python3.7/site-packages/discord/relationship.py b/venv/lib/python3.7/site-packages/discord/relationship.py new file mode 100644 index 0000000..a1bd2c1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/relationship.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .enums import RelationshipType, try_enum + +class Relationship: + """Represents a relationship in Discord. + + A relationship is like a friendship, a person who is blocked, etc. + Only non-bot accounts can have relationships. + + Attributes + ----------- + user: :class:`User` + The user you have the relationship with. + type: :class:`RelationshipType` + The type of relationship you have. + """ + + __slots__ = ('type', 'user', '_state') + + def __init__(self, *, state, data): + self._state = state + self.type = try_enum(RelationshipType, data['type']) + self.user = state.store_user(data['user']) + + def __repr__(self): + return ''.format(self) + + async def delete(self): + """|coro| + + Deletes the relationship. + + Raises + ------ + HTTPException + Deleting the relationship failed. + """ + + await self._state.http.remove_relationship(self.user.id) + + async def accept(self): + """|coro| + + Accepts the relationship request. e.g. accepting a + friend request. + + Raises + ------- + HTTPException + Accepting the relationship failed. 
+ """ + + await self._state.http.add_relationship(self.user.id) diff --git a/venv/lib/python3.7/site-packages/discord/role.py b/venv/lib/python3.7/site-packages/discord/role.py new file mode 100644 index 0000000..8608c5e --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/role.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .permissions import Permissions +from .errors import InvalidArgument +from .colour import Colour +from .mixins import Hashable +from .utils import snowflake_time + +class Role(Hashable): + """Represents a Discord role in a :class:`Guild`. + + .. container:: operations + + .. describe:: x == y + + Checks if two roles are equal. + + .. describe:: x != y + + Checks if two roles are not equal. + + .. describe:: x > y + + Checks if a role is higher than another in the hierarchy. + + .. describe:: x < y + + Checks if a role is lower than another in the hierarchy. + + .. describe:: x >= y + + Checks if a role is higher or equal to another in the hierarchy. + + .. describe:: x <= y + + Checks if a role is lower or equal to another in the hierarchy. + + .. describe:: hash(x) + + Return the role's hash. + + .. describe:: str(x) + + Returns the role's name. + + Attributes + ---------- + id: :class:`int` + The ID for the role. + name: :class:`str` + The name of the role. + permissions: :class:`Permissions` + Represents the role's permissions. + guild: :class:`Guild` + The guild the role belongs to. + colour: :class:`Colour` + Represents the role colour. An alias exists under ``color``. + hoist: :class:`bool` + Indicates if the role will be displayed separately from other members. + position: :class:`int` + The position of the role. This number is usually positive. The bottom + role has a position of 0. + managed: :class:`bool` + Indicates if the role is managed by the guild through some form of + integrations such as Twitch. + mentionable: :class:`bool` + Indicates if the role can be mentioned by users. 
+ """ + + __slots__ = ('id', 'name', 'permissions', 'color', 'colour', 'position', + 'managed', 'mentionable', 'hoist', 'guild', '_state') + + def __init__(self, *, guild, state, data): + self.guild = guild + self._state = state + self.id = int(data['id']) + self._update(data) + + def __str__(self): + return self.name + + def __repr__(self): + return ''.format(self) + + def __lt__(self, other): + if not isinstance(other, Role) or not isinstance(self, Role): + return NotImplemented + + if self.guild != other.guild: + raise RuntimeError('cannot compare roles from two different guilds.') + + # the @everyone role is always the lowest role in hierarchy + guild_id = self.guild.id + if self.id == guild_id: + # everyone_role < everyone_role -> False + return other.id != guild_id + + if self.position < other.position: + return True + + if self.position == other.position: + return int(self.id) > int(other.id) + + return False + + def __le__(self, other): + r = Role.__lt__(other, self) + if r is NotImplemented: + return NotImplemented + return not r + + def __gt__(self, other): + return Role.__lt__(other, self) + + def __ge__(self, other): + r = Role.__lt__(self, other) + if r is NotImplemented: + return NotImplemented + return not r + + def _update(self, data): + self.name = data['name'] + self.permissions = Permissions(data.get('permissions', 0)) + self.position = data.get('position', 0) + self.colour = Colour(data.get('color', 0)) + self.hoist = data.get('hoist', False) + self.managed = data.get('managed', False) + self.mentionable = data.get('mentionable', False) + self.color = self.colour + + def is_default(self): + """Checks if the role is the default role.""" + return self.guild.id == self.id + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the role's creation time in UTC.""" + return snowflake_time(self.id) + + @property + def mention(self): + """:class:`str`: Returns a string that allows you to mention a role.""" + return '<@&%s>' % self.id + + @property + def members(self): + """List[:class:`Member`]: Returns all the members with this role.""" + all_members = self.guild.members + if self.is_default(): + return all_members + + role_id = self.id + return [member for member in all_members if member._roles.has(role_id)] + + async def _move(self, position, reason): + if position <= 0: + raise InvalidArgument("Cannot move role to position 0 or below") + + if self.is_default(): + raise InvalidArgument("Cannot move default role") + + if self.position == position: + return # Save discord the extra request. + + http = self._state.http + + change_range = range(min(self.position, position), max(self.position, position) + 1) + roles = [r.id for r in self.guild.roles[1:] if r.position in change_range and r.id != self.id] + + if self.position > position: + roles.insert(0, self.id) + else: + roles.append(self.id) + + payload = [{"id": z[0], "position": z[1]} for z in zip(roles, change_range)] + await http.move_role_position(self.guild.id, payload, reason=reason) + + async def edit(self, *, reason=None, **fields): + """|coro| + + Edits the role. + + You must have the :attr:`~Permissions.manage_roles` permission to + use this. + + All fields are optional. + + Parameters + ----------- + name: :class:`str` + The new role name to change to. + permissions: :class:`Permissions` + The new permissions to change to. + colour: :class:`Colour` + The new colour to change to. 
(aliased to color as well) + hoist: :class:`bool` + Indicates if the role should be shown separately in the member list. + mentionable: :class:`bool` + Indicates if the role should be mentionable by others. + position: :class:`int` + The new role's position. This must be below your top role's + position or it will fail. + reason: Optional[:class:`str`] + The reason for editing this role. Shows up on the audit log. + + Raises + ------- + Forbidden + You do not have permissions to change the role. + HTTPException + Editing the role failed. + InvalidArgument + An invalid position was given or the default + role was asked to be moved. + """ + + position = fields.get('position') + if position is not None: + await self._move(position, reason=reason) + self.position = position + + try: + colour = fields['colour'] + except KeyError: + colour = fields.get('color', self.colour) + + payload = { + 'name': fields.get('name', self.name), + 'permissions': fields.get('permissions', self.permissions).value, + 'color': colour.value, + 'hoist': fields.get('hoist', self.hoist), + 'mentionable': fields.get('mentionable', self.mentionable) + } + + data = await self._state.http.edit_role(self.guild.id, self.id, reason=reason, **payload) + self._update(data) + + async def delete(self, *, reason=None): + """|coro| + + Deletes the role. + + You must have the :attr:`~Permissions.manage_roles` permission to + use this. + + Parameters + ----------- + reason: Optional[:class:`str`] + The reason for deleting this role. Shows up on the audit log. + + Raises + -------- + Forbidden + You do not have permissions to delete the role. + HTTPException + Deleting the role failed. + """ + + await self._state.http.delete_role(self.guild.id, self.id, reason=reason) diff --git a/venv/lib/python3.7/site-packages/discord/shard.py b/venv/lib/python3.7/site-packages/discord/shard.py new file mode 100644 index 0000000..fba96f4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/shard.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import itertools +import logging + +import websockets + +from .state import AutoShardedConnectionState +from .client import Client +from .gateway import * +from .errors import ClientException, InvalidArgument +from . 
import utils +from .enums import Status + +log = logging.getLogger(__name__) + +class Shard: + def __init__(self, ws, client): + self.ws = ws + self._client = client + self._dispatch = client.dispatch + self.loop = self._client.loop + self._current = self.loop.create_future() + self._current.set_result(None) # we just need an already done future + self._pending = asyncio.Event(loop=self.loop) + self._pending_task = None + + @property + def id(self): + return self.ws.shard_id + + def is_pending(self): + return not self._pending.is_set() + + def complete_pending_reads(self): + self._pending.set() + + async def _pending_reads(self): + try: + while self.is_pending(): + await self.poll() + except asyncio.CancelledError: + pass + + def launch_pending_reads(self): + self._pending_task = asyncio.ensure_future(self._pending_reads(), loop=self.loop) + + def wait(self): + return self._pending_task + + async def poll(self): + try: + await self.ws.poll_event() + except ResumeWebSocket: + log.info('Got a request to RESUME the websocket at Shard ID %s.', self.id) + coro = DiscordWebSocket.from_client(self._client, resume=True, shard_id=self.id, + session=self.ws.session_id, sequence=self.ws.sequence) + self._dispatch('disconnect') + self.ws = await asyncio.wait_for(coro, timeout=180.0, loop=self.loop) + + def get_future(self): + if self._current.done(): + self._current = asyncio.ensure_future(self.poll(), loop=self.loop) + + return self._current + +class AutoShardedClient(Client): + """A client similar to :class:`Client` except it handles the complications + of sharding for the user into a more manageable and transparent single + process bot. + + When using this client, you will be able to use it as-if it was a regular + :class:`Client` with a single shard when implementation wise internally it + is split up into multiple shards. This allows you to not have to deal with + IPC or other complicated infrastructure. + + It is recommended to use this client only if you have surpassed at least + 1000 guilds. + + If no :attr:`.shard_count` is provided, then the library will use the + Bot Gateway endpoint call to figure out how many shards to use. + + If a ``shard_ids`` parameter is given, then those shard IDs will be used + to launch the internal shards. Note that :attr:`.shard_count` must be provided + if this is used. By default, when omitted, the client will launch shards from + 0 to ``shard_count - 1``. + + Attributes + ------------ + shard_ids: Optional[List[:class:`int`]] + An optional list of shard_ids to launch the shards with. 
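A minimal sketch of the `shard_ids`/`shard_count` contract described above: two processes can split a four-shard bot between them, provided both pass the same `shard_count`. The token is a placeholder.

import discord

# process A runs shards 0 and 1 ...
client = discord.AutoShardedClient(shard_count=4, shard_ids=[0, 1])
# ... and process B would use shard_ids=[2, 3] with the same shard_count.
# client.run('token-goes-here')  # placeholder token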
+ """ + def __init__(self, *args, loop=None, **kwargs): + kwargs.pop('shard_id', None) + self.shard_ids = kwargs.pop('shard_ids', None) + super().__init__(*args, loop=loop, **kwargs) + + if self.shard_ids is not None: + if self.shard_count is None: + raise ClientException('When passing manual shard_ids, you must provide a shard_count.') + elif not isinstance(self.shard_ids, (list, tuple)): + raise ClientException('shard_ids parameter must be a list or a tuple.') + + self._connection = AutoShardedConnectionState(dispatch=self.dispatch, chunker=self._chunker, + handlers=self._handlers, syncer=self._syncer, + http=self.http, loop=self.loop, **kwargs) + + # instead of a single websocket, we have multiple + # the key is the shard_id + self.shards = {} + + def _get_websocket(guild_id): + i = (guild_id >> 22) % self.shard_count + return self.shards[i].ws + + self._connection._get_websocket = _get_websocket + + async def _chunker(self, guild, *, shard_id=None): + try: + guild_id = guild.id + shard_id = shard_id or guild.shard_id + except AttributeError: + guild_id = [s.id for s in guild] + + payload = { + 'op': 8, + 'd': { + 'guild_id': guild_id, + 'query': '', + 'limit': 0 + } + } + + ws = self.shards[shard_id].ws + await ws.send_as_json(payload) + + @property + def latency(self): + """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds. + + This operates similarly to :meth:`Client.latency` except it uses the average + latency of every shard's latency. To get a list of shard latency, check the + :attr:`latencies` property. Returns ``nan`` if there are no shards ready. + """ + if not self.shards: + return float('nan') + return sum(latency for _, latency in self.latencies) / len(self.shards) + + @property + def latencies(self): + """List[Tuple[:class:`int`, :class:`float`]]: A list of latencies between a HEARTBEAT and a HEARTBEAT_ACK in seconds. + + This returns a list of tuples with elements ``(shard_id, latency)``. + """ + return [(shard_id, shard.ws.latency) for shard_id, shard in self.shards.items()] + + async def request_offline_members(self, *guilds): + r"""|coro| + + Requests previously offline members from the guild to be filled up + into the :attr:`Guild.members` cache. This function is usually not + called. It should only be used if you have the ``fetch_offline_members`` + parameter set to ``False``. + + When the client logs on and connects to the websocket, Discord does + not provide the library with offline members if the number of members + in the guild is larger than 250. You can check if a guild is large + if :attr:`Guild.large` is ``True``. + + Parameters + ----------- + \*guilds: :class:`Guild` + An argument list of guilds to request offline members for. + + Raises + ------- + InvalidArgument + If any guild is unavailable or not large in the collection. + """ + if any(not g.large or g.unavailable for g in guilds): + raise InvalidArgument('An unavailable or non-large guild was passed.') + + _guilds = sorted(guilds, key=lambda g: g.shard_id) + for shard_id, sub_guilds in itertools.groupby(_guilds, key=lambda g: g.shard_id): + sub_guilds = list(sub_guilds) + await self._connection.request_offline_members(sub_guilds, shard_id=shard_id) + + async def launch_shard(self, gateway, shard_id): + try: + coro = websockets.connect(gateway, loop=self.loop, klass=DiscordWebSocket, compression=None) + ws = await asyncio.wait_for(coro, loop=self.loop, timeout=180.0) + except Exception: + log.info('Failed to connect for shard_id: %s. 
Retrying...', shard_id) + await asyncio.sleep(5.0, loop=self.loop) + return await self.launch_shard(gateway, shard_id) + + ws.token = self.http.token + ws._connection = self._connection + ws._discord_parsers = self._connection.parsers + ws._dispatch = self.dispatch + ws.gateway = gateway + ws.shard_id = shard_id + ws.shard_count = self.shard_count + ws._max_heartbeat_timeout = self._connection.heartbeat_timeout + + try: + # OP HELLO + await asyncio.wait_for(ws.poll_event(), loop=self.loop, timeout=180.0) + await asyncio.wait_for(ws.identify(), loop=self.loop, timeout=180.0) + except asyncio.TimeoutError: + log.info('Timed out when connecting for shard_id: %s. Retrying...', shard_id) + await asyncio.sleep(5.0, loop=self.loop) + return await self.launch_shard(gateway, shard_id) + + # keep reading the shard while others connect + self.shards[shard_id] = ret = Shard(ws, self) + ret.launch_pending_reads() + await asyncio.sleep(5.0, loop=self.loop) + + async def launch_shards(self): + if self.shard_count is None: + self.shard_count, gateway = await self.http.get_bot_gateway() + else: + gateway = await self.http.get_gateway() + + self._connection.shard_count = self.shard_count + + shard_ids = self.shard_ids if self.shard_ids else range(self.shard_count) + + for shard_id in shard_ids: + await self.launch_shard(gateway, shard_id) + + shards_to_wait_for = [] + for shard in self.shards.values(): + shard.complete_pending_reads() + shards_to_wait_for.append(shard.wait()) + + # wait for all pending tasks to finish + await utils.sane_wait_for(shards_to_wait_for, timeout=300.0, loop=self.loop) + + async def _connect(self): + await self.launch_shards() + + while True: + pollers = [shard.get_future() for shard in self.shards.values()] + done, _ = await asyncio.wait(pollers, loop=self.loop, return_when=asyncio.FIRST_COMPLETED) + for f in done: + # we wanna re-raise to the main Client.connect handler if applicable + f.result() + + async def close(self): + """|coro| + + Closes the connection to Discord. + """ + if self.is_closed(): + return + + self._closed = True + + for vc in self.voice_clients: + try: + await vc.disconnect() + except Exception: + pass + + to_close = [shard.ws.close() for shard in self.shards.values()] + if to_close: + await asyncio.wait(to_close, loop=self.loop) + + await self.http.close() + + async def change_presence(self, *, activity=None, status=None, afk=False, shard_id=None): + """|coro| + + Changes the client's presence. + + The activity parameter is a :class:`Activity` object (not a string) that represents + the activity being done currently. This could also be the slimmed down versions, + :class:`Game` and :class:`Streaming`. + + Example: :: + + game = discord.Game("with the API") + await client.change_presence(status=discord.Status.idle, activity=game) + + Parameters + ---------- + activity: Optional[Union[:class:`Game`, :class:`Streaming`, :class:`Activity`]] + The activity being done. ``None`` if no currently active activity is done. + status: Optional[:class:`Status`] + Indicates what status to change to. If ``None``, then + :attr:`Status.online` is used. + afk: :class:`bool` + Indicates if you are going AFK. This allows the discord + client to know how to handle push notifications better + for you in case you are actually idle and not lying. + shard_id: Optional[:class:`int`] + The shard_id to change the presence to. If not specified + or ``None``, then it will change the presence of every + shard the bot can see. 
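A hedged sketch built on the `latencies` property defined above: a simple command that reports the gateway latency of every shard. `bot` is assumed to be a `commands.AutoShardedBot`.

from discord.ext import commands

bot = commands.AutoShardedBot(command_prefix='.')

@bot.command(name='shardping')
async def shardping(ctx):
    report = '\n'.join('shard %d: %.0f ms' % (shard_id, latency * 1000)
                       for shard_id, latency in bot.latencies)
    await ctx.send(report)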
+ + Raises + ------ + InvalidArgument + If the ``activity`` parameter is not of proper type. + """ + + if status is None: + status = 'online' + status_enum = Status.online + elif status is Status.offline: + status = 'invisible' + status_enum = Status.offline + else: + status_enum = status + status = str(status) + + if shard_id is None: + for shard in self.shards.values(): + await shard.ws.change_presence(activity=activity, status=status, afk=afk) + + guilds = self._connection.guilds + else: + shard = self.shards[shard_id] + await shard.ws.change_presence(activity=activity, status=status, afk=afk) + guilds = [g for g in self._connection.guilds if g.shard_id == shard_id] + + for guild in guilds: + me = guild.me + if me is None: + continue + + me.activities = (activity,) + me.status = status_enum diff --git a/venv/lib/python3.7/site-packages/discord/state.py b/venv/lib/python3.7/site-packages/discord/state.py new file mode 100644 index 0000000..18c6965 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/state.py @@ -0,0 +1,1014 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +from collections import deque, namedtuple, OrderedDict +import copy +import datetime +import itertools +import logging +import math +import weakref +import inspect + +from .guild import Guild +from .activity import _ActivityTag +from .user import User, ClientUser +from .emoji import Emoji, PartialEmoji +from .message import Message +from .relationship import Relationship +from .channel import * +from .raw_models import * +from .member import Member +from .role import Role +from .enums import ChannelType, try_enum, Status, Enum +from . 
import utils +from .embeds import Embed +from .object import Object + +class ListenerType(Enum): + chunk = 0 + +Listener = namedtuple('Listener', ('type', 'future', 'predicate')) +log = logging.getLogger(__name__) +ReadyState = namedtuple('ReadyState', ('launch', 'guilds')) + +class ConnectionState: + def __init__(self, *, dispatch, chunker, handlers, syncer, http, loop, **options): + self.loop = loop + self.http = http + self.max_messages = max(options.get('max_messages', 5000), 100) + self.dispatch = dispatch + self.chunker = chunker + self.syncer = syncer + self.is_bot = None + self.handlers = handlers + self.shard_count = None + self._ready_task = None + self._fetch_offline = options.get('fetch_offline_members', True) + self.heartbeat_timeout = options.get('heartbeat_timeout', 60.0) + self._listeners = [] + + activity = options.get('activity', None) + if activity: + if not isinstance(activity, _ActivityTag): + raise TypeError('activity parameter must be one of Game, Streaming, or Activity.') + + activity = activity.to_dict() + + status = options.get('status', None) + if status: + if status is Status.offline: + status = 'invisible' + else: + status = str(status) + + self._activity = activity + self._status = status + + self.parsers = parsers = {} + for attr, func in inspect.getmembers(self): + if attr.startswith('parse_'): + parsers[attr[6:].upper()] = func + + self.clear() + + def clear(self): + self.user = None + self._users = weakref.WeakValueDictionary() + self._emojis = {} + self._calls = {} + self._guilds = {} + self._voice_clients = {} + + # LRU of max size 128 + self._private_channels = OrderedDict() + # extra dict to look up private channels by user id + self._private_channels_by_user = {} + self._messages = deque(maxlen=self.max_messages) + + def process_listeners(self, listener_type, argument, result): + removed = [] + for i, listener in enumerate(self._listeners): + if listener.type != listener_type: + continue + + future = listener.future + if future.cancelled(): + removed.append(i) + continue + + try: + passed = listener.predicate(argument) + except Exception as exc: + future.set_exception(exc) + removed.append(i) + else: + if passed: + future.set_result(result) + removed.append(i) + if listener.type == ListenerType.chunk: + break + + for index in reversed(removed): + del self._listeners[index] + + def call_handlers(self, key, *args, **kwargs): + try: + func = self.handlers[key] + except KeyError: + pass + else: + func(*args, **kwargs) + + @property + def self_id(self): + u = self.user + return u.id if u else None + + @property + def voice_clients(self): + return list(self._voice_clients.values()) + + def _get_voice_client(self, guild_id): + return self._voice_clients.get(guild_id) + + def _add_voice_client(self, guild_id, voice): + self._voice_clients[guild_id] = voice + + def _remove_voice_client(self, guild_id): + self._voice_clients.pop(guild_id, None) + + def _update_references(self, ws): + for vc in self.voice_clients: + vc.main_ws = ws + + def store_user(self, data): + # this way is 300% faster than `dict.setdefault`. 
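A standalone sketch of the parser-discovery convention used above: every `parse_*` method is registered under the upper-cased remainder of its name, so a `MESSAGE_CREATE` gateway event is routed to `parse_message_create`.

import inspect

class MiniState:
    def __init__(self):
        # mirror the loop above: strip the 'parse_' prefix, upper-case the rest
        self.parsers = {name[6:].upper(): func
                        for name, func in inspect.getmembers(self)
                        if name.startswith('parse_')}

    def parse_message_create(self, data):
        print('MESSAGE_CREATE for message', data['id'])

state = MiniState()
state.parsers['MESSAGE_CREATE']({'id': '42'})  # -> MESSAGE_CREATE for message 42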
+ user_id = int(data['id']) + try: + return self._users[user_id] + except KeyError: + user = User(state=self, data=data) + if user.discriminator != '0000': + self._users[user_id] = user + return user + + def get_user(self, id): + return self._users.get(id) + + def store_emoji(self, guild, data): + emoji_id = int(data['id']) + self._emojis[emoji_id] = emoji = Emoji(guild=guild, state=self, data=data) + return emoji + + @property + def guilds(self): + return list(self._guilds.values()) + + def _get_guild(self, guild_id): + return self._guilds.get(guild_id) + + def _add_guild(self, guild): + self._guilds[guild.id] = guild + + def _remove_guild(self, guild): + self._guilds.pop(guild.id, None) + + for emoji in guild.emojis: + self._emojis.pop(emoji.id, None) + + del guild + + @property + def emojis(self): + return list(self._emojis.values()) + + def get_emoji(self, emoji_id): + return self._emojis.get(emoji_id) + + @property + def private_channels(self): + return list(self._private_channels.values()) + + def _get_private_channel(self, channel_id): + try: + value = self._private_channels[channel_id] + except KeyError: + return None + else: + self._private_channels.move_to_end(channel_id) + return value + + def _get_private_channel_by_user(self, user_id): + return self._private_channels_by_user.get(user_id) + + def _add_private_channel(self, channel): + channel_id = channel.id + self._private_channels[channel_id] = channel + + if self.is_bot and len(self._private_channels) > 128: + _, to_remove = self._private_channels.popitem(last=False) + if isinstance(to_remove, DMChannel): + self._private_channels_by_user.pop(to_remove.recipient.id, None) + + if isinstance(channel, DMChannel): + self._private_channels_by_user[channel.recipient.id] = channel + + def add_dm_channel(self, data): + channel = DMChannel(me=self.user, state=self, data=data) + self._add_private_channel(channel) + return channel + + def _remove_private_channel(self, channel): + self._private_channels.pop(channel.id, None) + if isinstance(channel, DMChannel): + self._private_channels_by_user.pop(channel.recipient.id, None) + + def _get_message(self, msg_id): + return utils.find(lambda m: m.id == msg_id, reversed(self._messages)) + + def _add_guild_from_data(self, guild): + guild = Guild(data=guild, state=self) + self._add_guild(guild) + return guild + + def chunks_needed(self, guild): + for _ in range(math.ceil(guild._member_count / 1000)): + yield self.receive_chunk(guild.id) + + def _get_guild_channel(self, data): + channel_id = int(data['channel_id']) + try: + guild = self._get_guild(int(data['guild_id'])) + except KeyError: + channel = self.get_channel(channel_id) + guild = None + else: + channel = guild and guild.get_channel(channel_id) + + return channel or Object(id=channel_id), guild + + async def request_offline_members(self, guilds): + # get all the chunks + chunks = [] + for guild in guilds: + chunks.extend(self.chunks_needed(guild)) + + # we only want to request ~75 guilds per chunk request. 
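A standalone sketch of the 128-entry LRU kept for private channels above: `OrderedDict.move_to_end` refreshes an entry on access, and `popitem(last=False)` evicts the oldest entry once the cap is exceeded.

from collections import OrderedDict

lru = OrderedDict()
for channel_id in range(130):
    lru[channel_id] = 'DMChannel %d' % channel_id
    if len(lru) > 128:
        oldest_id, _ = lru.popitem(last=False)  # evict least recently used
        print('evicted', oldest_id)

lru.move_to_end(10)        # an access marks entry 10 as most recently used
print(next(iter(lru)))     # 2 -> the current least recently used entry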
+ splits = [guilds[i:i + 75] for i in range(0, len(guilds), 75)] + for split in splits: + await self.chunker(split) + + # wait for the chunks + if chunks: + try: + await utils.sane_wait_for(chunks, timeout=len(chunks) * 30.0, loop=self.loop) + except asyncio.TimeoutError: + log.info('Somehow timed out waiting for chunks.') + + async def _delay_ready(self): + try: + launch = self._ready_state.launch + + # only real bots wait for GUILD_CREATE streaming + if self.is_bot: + while not launch.is_set(): + # this snippet of code is basically waiting 2 seconds + # until the last GUILD_CREATE was sent + launch.set() + await asyncio.sleep(2, loop=self.loop) + + guilds = next(zip(*self._ready_state.guilds), []) + if self._fetch_offline: + await self.request_offline_members(guilds) + + for guild, unavailable in self._ready_state.guilds: + if unavailable is False: + self.dispatch('guild_available', guild) + else: + self.dispatch('guild_join', guild) + + # remove the state + try: + del self._ready_state + except AttributeError: + pass # already been deleted somehow + + # call GUILD_SYNC after we're done chunking + if not self.is_bot: + log.info('Requesting GUILD_SYNC for %s guilds', len(self.guilds)) + await self.syncer([s.id for s in self.guilds]) + except asyncio.CancelledError: + pass + else: + # dispatch the event + self.call_handlers('ready') + self.dispatch('ready') + finally: + self._ready_task = None + + def parse_ready(self, data): + if self._ready_task is not None: + self._ready_task.cancel() + + self._ready_state = ReadyState(launch=asyncio.Event(), guilds=[]) + self.clear() + self.user = user = ClientUser(state=self, data=data['user']) + self._users[user.id] = user + + guilds = self._ready_state.guilds + for guild_data in data['guilds']: + guild = self._add_guild_from_data(guild_data) + if (not self.is_bot and not guild.unavailable) or guild.large: + guilds.append((guild, guild.unavailable)) + + for relationship in data.get('relationships', []): + try: + r_id = int(relationship['id']) + except KeyError: + continue + else: + user._relationships[r_id] = Relationship(state=self, data=relationship) + + for pm in data.get('private_channels', []): + factory, _ = _channel_factory(pm['type']) + self._add_private_channel(factory(me=user, data=pm, state=self)) + + self.dispatch('connect') + self._ready_task = asyncio.ensure_future(self._delay_ready(), loop=self.loop) + + def parse_resumed(self, data): + self.dispatch('resumed') + + def parse_message_create(self, data): + channel, _ = self._get_guild_channel(data) + message = Message(channel=channel, data=data, state=self) + self.dispatch('message', message) + self._messages.append(message) + if channel and channel.__class__ is TextChannel: + channel.last_message_id = message.id + + def parse_message_delete(self, data): + raw = RawMessageDeleteEvent(data) + found = self._get_message(raw.message_id) + raw.cached_message = found + self.dispatch('raw_message_delete', raw) + if found is not None: + self.dispatch('message_delete', found) + self._messages.remove(found) + + def parse_message_delete_bulk(self, data): + raw = RawBulkMessageDeleteEvent(data) + found_messages = [message for message in self._messages if message.id in raw.message_ids] + raw.cached_messages = found_messages + self.dispatch('raw_bulk_message_delete', raw) + if found_messages: + self.dispatch('bulk_message_delete', found_messages) + for msg in found_messages: + self._messages.remove(msg) + + def parse_message_update(self, data): + raw = RawMessageUpdateEvent(data) + message = 
self._get_message(raw.message_id) + if message is not None: + older_message = copy.copy(message) + raw.cached_message = older_message + self.dispatch('raw_message_edit', raw) + message._update(data) + self.dispatch('message_edit', older_message, message) + else: + self.dispatch('raw_message_edit', raw) + + def parse_message_reaction_add(self, data): + emoji = data['emoji'] + emoji_id = utils._get_as_snowflake(emoji, 'id') + emoji = PartialEmoji.with_state(self, animated=emoji.get('animated', False), id=emoji_id, name=emoji['name']) + raw = RawReactionActionEvent(data, emoji) + self.dispatch('raw_reaction_add', raw) + + # rich interface here + message = self._get_message(raw.message_id) + if message is not None: + emoji = self._upgrade_partial_emoji(emoji) + reaction = message._add_reaction(data, emoji, raw.user_id) + user = self._get_reaction_user(message.channel, raw.user_id) + if user: + self.dispatch('reaction_add', reaction, user) + + def parse_message_reaction_remove_all(self, data): + raw = RawReactionClearEvent(data) + self.dispatch('raw_reaction_clear', raw) + + message = self._get_message(raw.message_id) + if message is not None: + old_reactions = message.reactions.copy() + message.reactions.clear() + self.dispatch('reaction_clear', message, old_reactions) + + def parse_message_reaction_remove(self, data): + emoji = data['emoji'] + emoji_id = utils._get_as_snowflake(emoji, 'id') + emoji = PartialEmoji.with_state(self, animated=emoji.get('animated', False), id=emoji_id, name=emoji['name']) + raw = RawReactionActionEvent(data, emoji) + self.dispatch('raw_reaction_remove', raw) + + message = self._get_message(raw.message_id) + if message is not None: + emoji = self._upgrade_partial_emoji(emoji) + try: + reaction = message._remove_reaction(data, emoji, raw.user_id) + except (AttributeError, ValueError): # eventual consistency lol + pass + else: + user = self._get_reaction_user(message.channel, raw.user_id) + if user: + self.dispatch('reaction_remove', reaction, user) + + def parse_presence_update(self, data): + guild_id = utils._get_as_snowflake(data, 'guild_id') + guild = self._get_guild(guild_id) + if guild is None: + log.warning('PRESENCE_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id) + return + + user = data['user'] + member_id = int(user['id']) + member = guild.get_member(member_id) + if member is None: + if 'username' not in user: + # sometimes we receive 'incomplete' member data post-removal. + # skip these useless cases. 
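A hedged sketch of consuming the message-update flow implemented above: when the edited message was not cached, only the raw gateway dict is available, and keys such as `content` may be absent (e.g. embed-only updates). `client` is an assumed `discord.Client`.

import discord

client = discord.Client()

@client.event
async def on_raw_message_edit(payload: discord.RawMessageUpdateEvent):
    if payload.cached_message is not None:
        print('edited, previous content:', payload.cached_message.content)
    else:
        print('edited message', payload.message_id,
              'new content:', payload.data.get('content', '<not provided>'))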
+ return + + member, old_member = Member._from_presence_update(guild=guild, data=data, state=self) + guild._add_member(member) + else: + old_member = Member._copy(member) + user_update = member._presence_update(data=data, user=user) + if user_update: + self.dispatch('user_update', user_update[0], user_update[1]) + + self.dispatch('member_update', old_member, member) + + def parse_user_update(self, data): + self.user._update(data) + + def parse_channel_delete(self, data): + guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id')) + channel_id = int(data['id']) + if guild is not None: + channel = guild.get_channel(channel_id) + if channel is not None: + guild._remove_channel(channel) + self.dispatch('guild_channel_delete', channel) + else: + # the reason we're doing this is so it's also removed from the + # private channel by user cache as well + channel = self._get_private_channel(channel_id) + if channel is not None: + self._remove_private_channel(channel) + self.dispatch('private_channel_delete', channel) + + def parse_channel_update(self, data): + channel_type = try_enum(ChannelType, data.get('type')) + channel_id = int(data['id']) + if channel_type is ChannelType.group: + channel = self._get_private_channel(channel_id) + old_channel = copy.copy(channel) + channel._update_group(data) + self.dispatch('private_channel_update', old_channel, channel) + return + + guild_id = utils._get_as_snowflake(data, 'guild_id') + guild = self._get_guild(guild_id) + if guild is not None: + channel = guild.get_channel(channel_id) + if channel is not None: + old_channel = copy.copy(channel) + channel._update(guild, data) + self.dispatch('guild_channel_update', old_channel, channel) + else: + log.warning('CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id) + else: + log.warning('CHANNEL_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id) + + def parse_channel_create(self, data): + factory, ch_type = _channel_factory(data['type']) + if factory is None: + log.warning('CHANNEL_CREATE referencing an unknown channel type %s. Discarding.', data['type']) + return + + channel = None + + if ch_type in (ChannelType.group, ChannelType.private): + channel_id = int(data['id']) + if self._get_private_channel(channel_id) is None: + channel = factory(me=self.user, data=data, state=self) + self._add_private_channel(channel) + self.dispatch('private_channel_create', channel) + else: + guild_id = utils._get_as_snowflake(data, 'guild_id') + guild = self._get_guild(guild_id) + if guild is not None: + channel = factory(guild=guild, state=self, data=data) + guild._add_channel(channel) + self.dispatch('guild_channel_create', channel) + else: + log.warning('CHANNEL_CREATE referencing an unknown guild ID: %s. Discarding.', guild_id) + return + + def parse_channel_pins_update(self, data): + channel_id = int(data['channel_id']) + channel = self.get_channel(channel_id) + if channel is None: + log.warning('CHANNEL_PINS_UPDATE referencing an unknown channel ID: %s. 
Discarding.', channel_id) + return + + last_pin = utils.parse_time(data['last_pin_timestamp']) if data['last_pin_timestamp'] else None + + try: + # I have not imported discord.abc in this file + # the isinstance check is also 2x slower than just checking this attribute + # so we're just gonna check it since it's easier and faster and lazier + channel.guild + except AttributeError: + self.dispatch('private_channel_pins_update', channel, last_pin) + else: + self.dispatch('guild_channel_pins_update', channel, last_pin) + + def parse_channel_recipient_add(self, data): + channel = self._get_private_channel(int(data['channel_id'])) + user = self.store_user(data['user']) + channel.recipients.append(user) + self.dispatch('group_join', channel, user) + + def parse_channel_recipient_remove(self, data): + channel = self._get_private_channel(int(data['channel_id'])) + user = self.store_user(data['user']) + try: + channel.recipients.remove(user) + except ValueError: + pass + else: + self.dispatch('group_remove', channel, user) + + def parse_guild_member_add(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is None: + log.warning('GUILD_MEMBER_ADD referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + return + + member = Member(guild=guild, data=data, state=self) + guild._add_member(member) + guild._member_count += 1 + self.dispatch('member_join', member) + + def parse_guild_member_remove(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + user_id = int(data['user']['id']) + member = guild.get_member(user_id) + if member is not None: + guild._remove_member(member) + guild._member_count -= 1 + self.dispatch('member_remove', member) + else: + log.warning('GUILD_MEMBER_REMOVE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + + def parse_guild_member_update(self, data): + guild = self._get_guild(int(data['guild_id'])) + user = data['user'] + user_id = int(user['id']) + if guild is None: + log.warning('GUILD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + return + + member = guild.get_member(user_id) + if member is not None: + old_member = copy.copy(member) + member._update(data) + self.dispatch('member_update', old_member, member) + else: + log.warning('GUILD_MEMBER_UPDATE referencing an unknown member ID: %s. Discarding.', user_id) + + def parse_guild_emojis_update(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is None: + log.warning('GUILD_EMOJIS_UPDATE referencing an unknown guild ID: %s. 
Discarding.', data['guild_id']) + return + + before_emojis = guild.emojis + for emoji in before_emojis: + self._emojis.pop(emoji.id, None) + guild.emojis = tuple(map(lambda d: self.store_emoji(guild, d), data['emojis'])) + self.dispatch('guild_emojis_update', guild, before_emojis, guild.emojis) + + def _get_create_guild(self, data): + if data.get('unavailable') is False: + # GUILD_CREATE with unavailable in the response + # usually means that the guild has become available + # and is therefore in the cache + guild = self._get_guild(int(data['id'])) + if guild is not None: + guild.unavailable = False + guild._from_data(data) + return guild + + return self._add_guild_from_data(data) + + async def _chunk_and_dispatch(self, guild, unavailable): + chunks = list(self.chunks_needed(guild)) + await self.chunker(guild) + if chunks: + try: + await utils.sane_wait_for(chunks, timeout=len(chunks), loop=self.loop) + except asyncio.TimeoutError: + log.info('Somehow timed out waiting for chunks.') + + if unavailable is False: + self.dispatch('guild_available', guild) + else: + self.dispatch('guild_join', guild) + + def parse_guild_create(self, data): + unavailable = data.get('unavailable') + if unavailable is True: + # joined a guild with unavailable == True so.. + return + + guild = self._get_create_guild(data) + + # check if it requires chunking + if guild.large: + if unavailable is False: + # check if we're waiting for 'useful' READY + # and if we are, we don't want to dispatch any + # event such as guild_join or guild_available + # because we're still in the 'READY' phase. Or + # so we say. + try: + state = self._ready_state + state.launch.clear() + state.guilds.append((guild, unavailable)) + except AttributeError: + # the _ready_state attribute is only there during + # processing of useful READY. + pass + else: + return + + # since we're not waiting for 'useful' READY we'll just + # do the chunk request here if wanted + if self._fetch_offline: + asyncio.ensure_future(self._chunk_and_dispatch(guild, unavailable), loop=self.loop) + return + + # Dispatch available if newly available + if unavailable is False: + self.dispatch('guild_available', guild) + else: + self.dispatch('guild_join', guild) + + def parse_guild_sync(self, data): + guild = self._get_guild(int(data['id'])) + guild._sync(data) + + def parse_guild_update(self, data): + guild = self._get_guild(int(data['id'])) + if guild is not None: + old_guild = copy.copy(guild) + guild._from_data(data) + self.dispatch('guild_update', old_guild, guild) + else: + log.warning('GUILD_UPDATE referencing an unknown guild ID: %s. Discarding.', data['id']) + + def parse_guild_delete(self, data): + guild = self._get_guild(int(data['id'])) + if guild is None: + log.warning('GUILD_DELETE referencing an unknown guild ID: %s. 
Discarding.', data['id']) + return + + if data.get('unavailable', False) and guild is not None: + # GUILD_DELETE with unavailable being True means that the + # guild that was available is now currently unavailable + guild.unavailable = True + self.dispatch('guild_unavailable', guild) + return + + # do a cleanup of the messages cache + self._messages = deque((msg for msg in self._messages if msg.guild != guild), maxlen=self.max_messages) + + self._remove_guild(guild) + self.dispatch('guild_remove', guild) + + def parse_guild_ban_add(self, data): + # we make the assumption that GUILD_BAN_ADD is done + # before GUILD_MEMBER_REMOVE is called + # hence we don't remove it from cache or do anything + # strange with it, the main purpose of this event + # is mainly to dispatch to another event worth listening to for logging + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + try: + user = User(data=data['user'], state=self) + except KeyError: + pass + else: + member = guild.get_member(user.id) or user + self.dispatch('member_ban', guild, member) + + def parse_guild_ban_remove(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + if 'user' in data: + user = self.store_user(data['user']) + self.dispatch('member_unban', guild, user) + + def parse_guild_role_create(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is None: + log.warning('GUILD_ROLE_CREATE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + return + + role_data = data['role'] + role = Role(guild=guild, data=role_data, state=self) + guild._add_role(role) + self.dispatch('guild_role_create', role) + + def parse_guild_role_delete(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + role_id = int(data['role_id']) + try: + role = guild._remove_role(role_id) + except KeyError: + return + else: + self.dispatch('guild_role_delete', role) + else: + log.warning('GUILD_ROLE_DELETE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + + def parse_guild_role_update(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + role_data = data['role'] + role_id = int(role_data['id']) + role = guild.get_role(role_id) + if role is not None: + old_role = copy.copy(role) + role._update(role_data) + self.dispatch('guild_role_update', old_role, role) + else: + log.warning('GUILD_ROLE_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + + def parse_guild_members_chunk(self, data): + guild_id = int(data['guild_id']) + guild = self._get_guild(guild_id) + members = data.get('members', []) + for member in members: + m = Member(guild=guild, data=member, state=self) + existing = guild.get_member(m.id) + if existing is None or existing.joined_at is None: + guild._add_member(m) + + log.info('Processed a chunk for %s members in guild ID %s.', len(members), guild_id) + self.process_listeners(ListenerType.chunk, guild, len(members)) + + def parse_guild_integrations_update(self, data): + guild = self._get_guild(int(data['guild_id'])) + if guild is not None: + self.dispatch('guild_integrations_update', guild) + else: + log.warning('GUILD_INTEGRATIONS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id']) + + def parse_webhooks_update(self, data): + channel = self.get_channel(int(data['channel_id'])) + if channel is not None: + self.dispatch('webhooks_update', channel) + else: + log.warning('WEBHOOKS_UPDATE referencing an unknown channel ID: %s. 
Discarding.', data['channel_id']) + + def parse_voice_state_update(self, data): + guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id')) + channel_id = utils._get_as_snowflake(data, 'channel_id') + if guild is not None: + if int(data['user_id']) == self.user.id: + voice = self._get_voice_client(guild.id) + if voice is not None: + ch = guild.get_channel(channel_id) + if ch is not None: + voice.channel = ch + + member, before, after = guild._update_voice_state(data, channel_id) + if member is not None: + self.dispatch('voice_state_update', member, before, after) + else: + log.warning('VOICE_STATE_UPDATE referencing an unknown member ID: %s. Discarding.', data['user_id']) + else: + # in here we're either at private or group calls + call = self._calls.get(channel_id) + if call is not None: + call._update_voice_state(data) + + def parse_voice_server_update(self, data): + try: + key_id = int(data['guild_id']) + except KeyError: + key_id = int(data['channel_id']) + + vc = self._get_voice_client(key_id) + if vc is not None: + asyncio.ensure_future(vc._create_socket(key_id, data)) + + def parse_typing_start(self, data): + channel, guild = self._get_guild_channel(data) + if channel is not None: + member = None + user_id = utils._get_as_snowflake(data, 'user_id') + if isinstance(channel, DMChannel): + member = channel.recipient + elif isinstance(channel, TextChannel) and guild is not None: + member = guild.get_member(user_id) + elif isinstance(channel, GroupChannel): + member = utils.find(lambda x: x.id == user_id, channel.recipients) + + if member is not None: + timestamp = datetime.datetime.utcfromtimestamp(data.get('timestamp')) + self.dispatch('typing', channel, member, timestamp) + + def parse_relationship_add(self, data): + key = int(data['id']) + old = self.user.get_relationship(key) + new = Relationship(state=self, data=data) + self.user._relationships[key] = new + if old is not None: + self.dispatch('relationship_update', old, new) + else: + self.dispatch('relationship_add', new) + + def parse_relationship_remove(self, data): + key = int(data['id']) + try: + old = self.user._relationships.pop(key) + except KeyError: + pass + else: + self.dispatch('relationship_remove', old) + + def _get_reaction_user(self, channel, user_id): + if isinstance(channel, TextChannel): + return channel.guild.get_member(user_id) + return self.get_user(user_id) + + def get_reaction_emoji(self, data): + emoji_id = utils._get_as_snowflake(data, 'id') + + if not emoji_id: + return data['name'] + + try: + return self._emojis[emoji_id] + except KeyError: + return PartialEmoji(animated=data.get('animated', False), id=emoji_id, name=data['name']) + + def _upgrade_partial_emoji(self, emoji): + emoji_id = emoji.id + if not emoji_id: + return emoji.name + try: + return self._emojis[emoji_id] + except KeyError: + return emoji + + def get_channel(self, id): + if id is None: + return None + + pm = self._get_private_channel(id) + if pm is not None: + return pm + + for guild in self.guilds: + channel = guild.get_channel(id) + if channel is not None: + return channel + + def create_message(self, *, channel, data): + return Message(state=self, channel=channel, data=data) + + def receive_chunk(self, guild_id): + future = self.loop.create_future() + listener = Listener(ListenerType.chunk, future, lambda s: s.id == guild_id) + self._listeners.append(listener) + return future + +class AutoShardedConnectionState(ConnectionState): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._ready_task = 
None + + async def request_offline_members(self, guilds, *, shard_id): + # get all the chunks + chunks = [] + for guild in guilds: + chunks.extend(self.chunks_needed(guild)) + + # we only want to request ~75 guilds per chunk request. + splits = [guilds[i:i + 75] for i in range(0, len(guilds), 75)] + for split in splits: + await self.chunker(split, shard_id=shard_id) + + # wait for the chunks + if chunks: + try: + await utils.sane_wait_for(chunks, timeout=len(chunks) * 30.0, loop=self.loop) + except asyncio.TimeoutError: + log.info('Somehow timed out waiting for chunks.') + + async def _delay_ready(self): + launch = self._ready_state.launch + while not launch.is_set(): + # this snippet of code is basically waiting 2 seconds + # until the last GUILD_CREATE was sent + launch.set() + await asyncio.sleep(2.0 * self.shard_count, loop=self.loop) + + if self._fetch_offline: + guilds = sorted(self._ready_state.guilds, key=lambda g: g[0].shard_id) + + for shard_id, sub_guilds_info in itertools.groupby(guilds, key=lambda g: g[0].shard_id): + sub_guilds, sub_available = zip(*sub_guilds_info) + await self.request_offline_members(sub_guilds, shard_id=shard_id) + + for guild, unavailable in zip(sub_guilds, sub_available): + if unavailable is False: + self.dispatch('guild_available', guild) + else: + self.dispatch('guild_join', guild) + self.dispatch('shard_ready', shard_id) + else: + for guild, unavailable in self._ready_state.guilds: + if unavailable is False: + self.dispatch('guild_available', guild) + else: + self.dispatch('guild_join', guild) + + # remove the state + try: + del self._ready_state + except AttributeError: + pass # already been deleted somehow + + # regular users cannot shard so we won't worry about it here. + + # clear the current task + self._ready_task = None + + # dispatch the event + self.call_handlers('ready') + self.dispatch('ready') + + def parse_ready(self, data): + if not hasattr(self, '_ready_state'): + self._ready_state = ReadyState(launch=asyncio.Event(), guilds=[]) + + self.user = user = ClientUser(state=self, data=data['user']) + self._users[user.id] = user + + guilds = self._ready_state.guilds + for guild_data in data['guilds']: + guild = self._add_guild_from_data(guild_data) + if guild.large: + guilds.append((guild, guild.unavailable)) + + for pm in data.get('private_channels', []): + factory, _ = _channel_factory(pm['type']) + self._add_private_channel(factory(me=user, data=pm, state=self)) + + self.dispatch('connect') + if self._ready_task is None: + self._ready_task = asyncio.ensure_future(self._delay_ready(), loop=self.loop) diff --git a/venv/lib/python3.7/site-packages/discord/user.py b/venv/lib/python3.7/site-packages/discord/user.py new file mode 100644 index 0000000..b567913 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/user.py @@ -0,0 +1,852 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2017 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from collections import namedtuple + +import discord.abc +from .utils import snowflake_time, _bytes_to_base64_data, parse_time +from .enums import DefaultAvatar, RelationshipType, UserFlags, HypeSquadHouse, PremiumType, try_enum +from .errors import ClientException +from .colour import Colour +from .asset import Asset + +class Profile(namedtuple('Profile', 'flags user mutual_guilds connected_accounts premium_since')): + __slots__ = () + + @property + def nitro(self): + return self.premium_since is not None + + premium = nitro + + def _has_flag(self, o): + v = o.value + return (self.flags & v) == v + + @property + def staff(self): + return self._has_flag(UserFlags.staff) + + @property + def partner(self): + return self._has_flag(UserFlags.partner) + + @property + def bug_hunter(self): + return self._has_flag(UserFlags.bug_hunter) + + @property + def early_supporter(self): + return self._has_flag(UserFlags.early_supporter) + + @property + def hypesquad(self): + return self._has_flag(UserFlags.hypesquad) + + @property + def hypesquad_houses(self): + flags = (UserFlags.hypesquad_bravery, UserFlags.hypesquad_brilliance, UserFlags.hypesquad_balance) + return [house for house, flag in zip(HypeSquadHouse, flags) if self._has_flag(flag)] + +_BaseUser = discord.abc.User + +class BaseUser(_BaseUser): + __slots__ = ('name', 'id', 'discriminator', 'avatar', 'bot', '_state') + + def __init__(self, *, state, data): + self._state = state + self._update(data) + + def __str__(self): + return '{0.name}#{0.discriminator}'.format(self) + + def __eq__(self, other): + return isinstance(other, _BaseUser) and other.id == self.id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return self.id >> 22 + + def _update(self, data): + self.name = data['username'] + self.id = int(data['id']) + self.discriminator = data['discriminator'] + self.avatar = data['avatar'] + self.bot = data.get('bot', False) + + @classmethod + def _copy(cls, user): + self = cls.__new__(cls) # bypass __init__ + + self.name = user.name + self.id = user.id + self.discriminator = user.discriminator + self.avatar = user.avatar + self.bot = user.bot + self._state = user._state + + return self + + @property + def avatar_url(self): + """Returns an :class:`Asset` for the avatar the user has. + + If the user does not have a traditional avatar, an asset for + the default avatar is returned instead. + + This is equivalent to calling :meth:`avatar_url_as` with + the default parameters (i.e. webp/gif detection and a size of 1024). + """ + return self.avatar_url_as(format=None, size=1024) + + def is_avatar_animated(self): + """Indicates if the user has an animated avatar.""" + return bool(self.avatar and self.avatar.startswith('a_')) + + def avatar_url_as(self, *, format=None, static_format='webp', size=1024): + """Returns an :class:`Asset` for the avatar the user has. + + If the user does not have a traditional avatar, an asset for + the default avatar is returned instead. 
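# Illustrative usage sketch (not part of this patch): turning the avatar
# helpers above into a plain CDN URL. `user` is assumed to be any
# discord.User or discord.Member instance.
def avatar_png_url(user):
    # 256 is a power of two within [16, 4096]; format='gif' is only valid
    # for animated avatars, so a static PNG is the safe default here.
    return str(user.avatar_url_as(format='png', size=256))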
+ + The format must be one of 'webp', 'jpeg', 'jpg', 'png' or 'gif', and + 'gif' is only valid for animated avatars. The size must be a power of 2 + between 16 and 4096. + + Parameters + ----------- + format: Optional[:class:`str`] + The format to attempt to convert the avatar to. + If the format is ``None``, then it is automatically + detected into either 'gif' or static_format depending on the + avatar being animated or not. + static_format: Optional[:class:`str`] + Format to attempt to convert only non-animated avatars to. + Defaults to 'webp' + size: :class:`int` + The size of the image to display. + + Raises + ------ + InvalidArgument + Bad image format passed to ``format`` or ``static_format``, or + invalid ``size``. + + Returns + -------- + :class:`Asset` + The resulting CDN asset. + """ + return Asset._from_avatar(self._state, self, format=format, static_format=static_format, size=size) + + @property + def default_avatar(self): + """class:`DefaultAvatar`: Returns the default avatar for a given user. This is calculated by the user's discriminator.""" + return try_enum(DefaultAvatar, int(self.discriminator) % len(DefaultAvatar)) + + @property + def default_avatar_url(self): + """:class:`Asset`: Returns a URL for a user's default avatar.""" + return Asset(self._state, 'https://cdn.discordapp.com/embed/avatars/{}.png'.format(self.default_avatar.value)) + + @property + def colour(self): + """:class:`Colour`: A property that returns a colour denoting the rendered colour + for the user. This always returns :meth:`Colour.default`. + + There is an alias for this named :meth:`color`. + """ + return Colour.default() + + @property + def color(self): + """:class:`Colour`: A property that returns a color denoting the rendered color + for the user. This always returns :meth:`Colour.default`. + + There is an alias for this named :meth:`colour`. + """ + return self.colour + + @property + def mention(self): + """:class:`str`: Returns a string that allows you to mention the given user.""" + return '<@{0.id}>'.format(self) + + def permissions_in(self, channel): + """An alias for :meth:`abc.GuildChannel.permissions_for`. + + Basically equivalent to: + + .. code-block:: python3 + + channel.permissions_for(self) + + Parameters + ----------- + channel: :class:`abc.GuildChannel` + The channel to check your permissions for. + """ + return channel.permissions_for(self) + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the user's creation time in UTC. + + This is when the user's discord account was created.""" + return snowflake_time(self.id) + + @property + def display_name(self): + """:class:`str`: Returns the user's display name. + + For regular users this is just their username, but + if they have a guild specific nickname then that + is returned instead. + """ + return self.name + + def mentioned_in(self, message): + """Checks if the user is mentioned in the specified message. + + Parameters + ----------- + message: :class:`Message` + The message to check if you're mentioned in. + """ + + if message.mention_everyone: + return True + + for user in message.mentions: + if user.id == self.id: + return True + + return False + +class ClientUser(BaseUser): + """Represents your Discord user. + + .. container:: operations + + .. describe:: x == y + + Checks if two users are equal. + + .. describe:: x != y + + Checks if two users are not equal. + + .. describe:: hash(x) + + Return the user's hash. + + .. describe:: str(x) + + Returns the user's name with discriminator. 
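# Illustrative sketch (not part of this patch): the convenience properties
# documented above (mention, created_at, mentioned_in) used to build a small
# diagnostic string. `message` is assumed to be a discord.Message from a guild.
def describe_author(message):
    author = message.author
    line = '{0} ({0.mention}) created on {1:%Y-%m-%d}'.format(author, author.created_at)
    if message.guild is not None and message.guild.me.mentioned_in(message):
        line += ', and this message mentions the bot'
    return line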
+ + Attributes + ----------- + name: :class:`str` + The user's username. + id: :class:`int` + The user's unique ID. + discriminator: :class:`str` + The user's discriminator. This is given when the username has conflicts. + avatar: Optional[:class:`str`] + The avatar hash the user has. Could be None. + bot: :class:`bool` + Specifies if the user is a bot account. + verified: :class:`bool` + Specifies if the user is a verified account. + email: Optional[:class:`str`] + The email the user used when registering. + locale: Optional[:class:`str`] + The IETF language tag used to identify the language the user is using. + mfa_enabled: :class:`bool` + Specifies if the user has MFA turned on and working. + premium: :class:`bool` + Specifies if the user is a premium user (e.g. has Discord Nitro). + premium_type: :class:`PremiumType` + Specifies the type of premium a user has (e.g. Nitro or Nitro Classic). Could be None if the user is not premium. + """ + __slots__ = BaseUser.__slots__ + \ + ('email', 'locale', '_flags', 'verified', 'mfa_enabled', + 'premium', 'premium_type', '_relationships', '__weakref__') + + def __init__(self, *, state, data): + super().__init__(state=state, data=data) + self._relationships = {} + + def __repr__(self): + return ''.format(self) + + def _update(self, data): + super()._update(data) + # There's actually an Optional[str] phone field as well but I won't use it + self.verified = data.get('verified', False) + self.email = data.get('email') + self.locale = data.get('locale') + self._flags = data.get('flags', 0) + self.mfa_enabled = data.get('mfa_enabled', False) + self.premium = data.get('premium', False) + self.premium_type = try_enum(PremiumType, data.get('premium_type', None)) + + def get_relationship(self, user_id): + """Retrieves the :class:`Relationship` if applicable. + + .. note:: + + This only applies to non-bot accounts. + + Parameters + ----------- + user_id: :class:`int` + The user ID to check if we have a relationship with them. + + Returns + -------- + Optional[:class:`Relationship`] + The relationship if available or ``None``. + """ + return self._relationships.get(user_id) + + @property + def relationships(self): + """List[:class:`User`]: Returns all the relationships that the user has. + + .. note:: + + This only applies to non-bot accounts. + """ + return list(self._relationships.values()) + + @property + def friends(self): + r"""List[:class:`User`]: Returns all the users that the user is friends with. + + .. note:: + + This only applies to non-bot accounts. + """ + return [r.user for r in self._relationships.values() if r.type is RelationshipType.friend] + + @property + def blocked(self): + r"""List[:class:`User`]: Returns all the users that the user has blocked. + + .. note:: + + This only applies to non-bot accounts. + """ + return [r.user for r in self._relationships.values() if r.type is RelationshipType.blocked] + + async def edit(self, **fields): + """|coro| + + Edits the current profile of the client. + + If a bot account is used then a password field is optional, + otherwise it is required. + + .. note:: + + To upload an avatar, a :term:`py:bytes-like object` must be passed in that + represents the image being uploaded. If this is done through a file + then the file must be opened via ``open('some_filename', 'rb')`` and + the :term:`py:bytes-like object` is given through the use of ``fp.read()``. + + The only image formats supported for uploading is JPEG and PNG. 
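# Illustrative sketch (not part of this patch): editing the bot's own account
# as the docstring above describes. 'avatar.png' is a placeholder path and
# `client` an assumed connected discord.Client.
async def update_profile(client, new_name):
    with open('avatar.png', 'rb') as fp:
        # Bot accounts need no password; `avatar` expects raw PNG/JPEG bytes.
        await client.user.edit(username=new_name, avatar=fp.read())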
+ + Parameters + ----------- + password: :class:`str` + The current password for the client's account. + Only applicable to user accounts. + new_password: :class:`str` + The new password you wish to change to. + Only applicable to user accounts. + email: :class:`str` + The new email you wish to change to. + Only applicable to user accounts. + house: Optional[:class:`HypeSquadHouse`] + The hypesquad house you wish to change to. + Could be ``None`` to leave the current house. + Only applicable to user accounts. + username: :class:`str` + The new username you wish to change to. + avatar: :class:`bytes` + A :term:`py:bytes-like object` representing the image to upload. + Could be ``None`` to denote no avatar. + + Raises + ------ + HTTPException + Editing your profile failed. + InvalidArgument + Wrong image format passed for ``avatar``. + ClientException + Password is required for non-bot accounts. + House field was not a HypeSquadHouse. + """ + + try: + avatar_bytes = fields['avatar'] + except KeyError: + avatar = self.avatar + else: + if avatar_bytes is not None: + avatar = _bytes_to_base64_data(avatar_bytes) + else: + avatar = None + + not_bot_account = not self.bot + password = fields.get('password') + if not_bot_account and password is None: + raise ClientException('Password is required for non-bot accounts.') + + args = { + 'password': password, + 'username': fields.get('username', self.name), + 'avatar': avatar + } + + if not_bot_account: + args['email'] = fields.get('email', self.email) + + if 'new_password' in fields: + args['new_password'] = fields['new_password'] + + http = self._state.http + + if 'house' in fields: + house = fields['house'] + if house is None: + await http.leave_hypesquad_house() + elif not isinstance(house, HypeSquadHouse): + raise ClientException('`house` parameter was not a HypeSquadHouse') + else: + value = house.value + + await http.change_hypesquad_house(value) + + data = await http.edit_profile(**args) + if not_bot_account: + self.email = data['email'] + try: + http._token(data['token'], bot=False) + except KeyError: + pass + + self._update(data) + + async def create_group(self, *recipients): + r"""|coro| + + Creates a group direct message with the recipients + provided. These recipients must be have a relationship + of type :attr:`RelationshipType.friend`. + + .. note:: + + This only applies to non-bot accounts. + + Parameters + ----------- + \*recipients: :class:`User` + An argument :class:`list` of :class:`User` to have in + your group. + + Raises + ------- + HTTPException + Failed to create the group direct message. + ClientException + Attempted to create a group with only one recipient. + This does not include yourself. + + Returns + ------- + :class:`GroupChannel` + The new group channel. + """ + + from .channel import GroupChannel + + if len(recipients) < 2: + raise ClientException('You must have two or more recipients to create a group.') + + users = [str(u.id) for u in recipients] + data = await self._state.http.start_group(self.id, users) + return GroupChannel(me=self, data=data, state=self._state) + + async def edit_settings(self, **kwargs): + """|coro| + + Edits the client user's settings. + + .. note:: + + This only applies to non-bot accounts. + + Parameters + ------- + afk_timeout: :class:`int` + How long (in seconds) the user needs to be AFK until Discord + sends push notifications to your mobile device. + animate_emojis: :class:`bool` + Whether or not to animate emojis in the chat. 
+ convert_emoticons: :class:`bool` + Whether or not to automatically convert emoticons into emojis. + e.g. :-) -> 😃 + default_guilds_restricted: :class:`bool` + Whether or not to automatically disable DMs between you and + members of new guilds you join. + detect_platform_accounts: :class:`bool` + Whether or not to automatically detect accounts from services + like Steam and Blizzard when you open the Discord client. + developer_mode: :class:`bool` + Whether or not to enable developer mode. + disable_games_tab: :class:`bool` + Whether or not to disable the showing of the Games tab. + enable_tts_command: :class:`bool` + Whether or not to allow tts messages to be played/sent. + explicit_content_filter: :class:`UserContentFilter` + The filter for explicit content in all messages. + friend_source_flags: :class:`FriendFlags` + Who can add you as a friend. + gif_auto_play: :class:`bool` + Whether or not to automatically play gifs that are in the chat. + guild_positions: List[:class:`abc.Snowflake`] + A list of guilds in order of the guild/guild icons that are on + the left hand side of the UI. + inline_attachment_media: :class:`bool` + Whether or not to display attachments when they are uploaded in chat. + inline_embed_media: :class:`bool` + Whether or not to display videos and images from links posted in chat. + locale: :class:`str` + The :rfc:`3066` language identifier of the locale to use for the language + of the Discord client. + message_display_compact: :class:`bool` + Whether or not to use the compact Discord display mode. + render_embeds: :class:`bool` + Whether or not to render embeds that are sent in the chat. + render_reactions: :class:`bool` + Whether or not to render reactions that are added to messages. + restricted_guilds: List[:class:`abc.Snowflake`] + A list of guilds that you will not receive DMs from. + show_current_game: :class:`bool` + Whether or not to display the game that you are currently playing. + status: :class:`Status` + The clients status that is shown to others. + theme: :class:`Theme` + The theme of the Discord UI. + timezone_offset: :class:`int` + The timezone offset to use. + + Raises + ------- + HTTPException + Editing the settings failed. + Forbidden + The client is a bot user and not a user account. + + Returns + ------- + :class:`dict` + The client user's updated settings. 
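# Illustrative sketch (not part of this patch): edit_settings only works on
# user accounts, never on bots, so for this repository it is informational
# only. `client` is an assumed logged-in user-account Client.
import discord

async def go_idle(client):
    await client.user.edit_settings(status=discord.Status.idle, afk_timeout=300)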
+ """ + payload = {} + + content_filter = kwargs.pop('explicit_content_filter', None) + if content_filter: + payload.update({'explicit_content_filter': content_filter.value}) + + friend_flags = kwargs.pop('friend_source_flags', None) + if friend_flags: + dicts = [{}, {'mutual_guilds': True}, {'mutual_friends': True}, + {'mutual_guilds': True, 'mutual_friends': True}, {'all': True}] + payload.update({'friend_source_flags': dicts[friend_flags.value]}) + + guild_positions = kwargs.pop('guild_positions', None) + if guild_positions: + guild_positions = [str(x.id) for x in guild_positions] + payload.update({'guild_positions': guild_positions}) + + restricted_guilds = kwargs.pop('restricted_guilds', None) + if restricted_guilds: + restricted_guilds = [str(x.id) for x in restricted_guilds] + payload.update({'restricted_guilds': restricted_guilds}) + + status = kwargs.pop('status', None) + if status: + payload.update({'status': status.value}) + + theme = kwargs.pop('theme', None) + if theme: + payload.update({'theme': theme.value}) + + payload.update(kwargs) + + data = await self._state.http.edit_settings(**payload) + return data + +class User(BaseUser, discord.abc.Messageable): + """Represents a Discord user. + + .. container:: operations + + .. describe:: x == y + + Checks if two users are equal. + + .. describe:: x != y + + Checks if two users are not equal. + + .. describe:: hash(x) + + Return the user's hash. + + .. describe:: str(x) + + Returns the user's name with discriminator. + + Attributes + ----------- + name: :class:`str` + The user's username. + id: :class:`int` + The user's unique ID. + discriminator: :class:`str` + The user's discriminator. This is given when the username has conflicts. + avatar: Optional[:class:`str`] + The avatar hash the user has. Could be None. + bot: :class:`bool` + Specifies if the user is a bot account. + """ + + __slots__ = BaseUser.__slots__ + ('__weakref__',) + + def __repr__(self): + return ''.format(self) + + async def _get_channel(self): + ch = await self.create_dm() + return ch + + @property + def dm_channel(self): + """Optional[:class:`DMChannel`]: Returns the channel associated with this user if it exists. + + If this returns ``None``, you can create a DM channel by calling the + :meth:`create_dm` coroutine function. + """ + return self._state._get_private_channel_by_user(self.id) + + async def create_dm(self): + """Creates a :class:`DMChannel` with this user. + + This should be rarely called, as this is done transparently for most + people. + """ + found = self.dm_channel + if found is not None: + return found + + state = self._state + data = await state.http.start_private_message(self.id) + return state.add_dm_channel(data) + + @property + def relationship(self): + """Returns the :class:`Relationship` with this user if applicable, ``None`` otherwise. + + .. note:: + + This only applies to non-bot accounts. + """ + return self._state.user.get_relationship(self.id) + + async def mutual_friends(self): + """|coro| + + Gets all mutual friends of this user. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to get mutual friends of this user. + HTTPException + Getting mutual friends failed. + + Returns + ------- + List[:class:`User`] + The users that are mutual friends. + """ + state = self._state + mutuals = await state.http.get_mutual_friends(self.id) + return [User(state=state, data=friend) for friend in mutuals] + + def is_friend(self): + """Checks if the user is your friend. + + .. 
note:: + + This only applies to non-bot accounts. + """ + r = self.relationship + if r is None: + return False + return r.type is RelationshipType.friend + + def is_blocked(self): + """Checks if the user is blocked. + + .. note:: + + This only applies to non-bot accounts. + """ + r = self.relationship + if r is None: + return False + return r.type is RelationshipType.blocked + + async def block(self): + """|coro| + + Blocks the user. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to block this user. + HTTPException + Blocking the user failed. + """ + + await self._state.http.add_relationship(self.id, type=RelationshipType.blocked.value) + + async def unblock(self): + """|coro| + + Unblocks the user. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to unblock this user. + HTTPException + Unblocking the user failed. + """ + await self._state.http.remove_relationship(self.id) + + async def remove_friend(self): + """|coro| + + Removes the user as a friend. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to remove this user as a friend. + HTTPException + Removing the user as a friend failed. + """ + await self._state.http.remove_relationship(self.id) + + async def send_friend_request(self): + """|coro| + + Sends the user a friend request. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to send a friend request to the user. + HTTPException + Sending the friend request failed. + """ + await self._state.http.send_friend_request(username=self.name, discriminator=self.discriminator) + + async def profile(self): + """|coro| + + Gets the user's profile. + + .. note:: + + This only applies to non-bot accounts. + + Raises + ------- + Forbidden + Not allowed to fetch profiles. + HTTPException + Fetching the profile failed. + + Returns + -------- + :class:`Profile` + The profile of the user. + """ + + state = self._state + data = await state.http.get_user_profile(self.id) + + def transform(d): + return state._get_guild(int(d['id'])) + + since = data.get('premium_since') + mutual_guilds = list(filter(None, map(transform, data.get('mutual_guilds', [])))) + return Profile(flags=data['user'].get('flags', 0), + premium_since=parse_time(since), + mutual_guilds=mutual_guilds, + user=self, + connected_accounts=data['connected_accounts']) diff --git a/venv/lib/python3.7/site-packages/discord/utils.py b/venv/lib/python3.7/site-packages/discord/utils.py new file mode 100644 index 0000000..48942a1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/utils.py @@ -0,0 +1,467 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import array +import asyncio +import collections.abc +import unicodedata +from base64 import b64encode +from bisect import bisect_left +import datetime +from email.utils import parsedate_to_datetime +import functools +from inspect import isawaitable as _isawaitable +from operator import attrgetter +import json +import re +import warnings + +from .errors import InvalidArgument +from .object import Object + +DISCORD_EPOCH = 1420070400000 + +class cached_property: + def __init__(self, function): + self.function = function + self.__doc__ = getattr(function, '__doc__') + + def __get__(self, instance, owner): + if instance is None: + return self + + value = self.function(instance) + setattr(instance, self.function.__name__, value) + + return value + +class CachedSlotProperty: + def __init__(self, name, function): + self.name = name + self.function = function + self.__doc__ = getattr(function, '__doc__') + + def __get__(self, instance, owner): + if instance is None: + return self + + try: + return getattr(instance, self.name) + except AttributeError: + value = self.function(instance) + setattr(instance, self.name, value) + return value + +def cached_slot_property(name): + def decorator(func): + return CachedSlotProperty(name, func) + return decorator + +class SequenceProxy(collections.abc.Sequence): + """Read-only proxy of a Sequence.""" + def __init__(self, proxied): + self.__proxied = proxied + + def __getitem__(self, idx): + return self.__proxied[idx] + + def __len__(self): + return len(self.__proxied) + + def __contains__(self, item): + return item in self.__proxied + + def __iter__(self): + return iter(self.__proxied) + + def __reversed__(self): + return reversed(self.__proxied) + + def index(self, value, *args, **kwargs): + return self.__proxied.index(value, *args, **kwargs) + + def count(self, value): + return self.__proxied.count(value) + +def parse_time(timestamp): + if timestamp: + return datetime.datetime(*map(int, re.split(r'[^\d]', timestamp.replace('+00:00', '')))) + return None + +def deprecated(instead=None): + def actual_decorator(func): + @functools.wraps(func) + def decorated(*args, **kwargs): + warnings.simplefilter('always', DeprecationWarning) # turn off filter + if instead: + fmt = "{0.__name__} is deprecated, use {1} instead." + else: + fmt = '{0.__name__} is deprecated.' + + warnings.warn(fmt.format(func, instead), stacklevel=3, category=DeprecationWarning) + warnings.simplefilter('default', DeprecationWarning) # reset filter + return func(*args, **kwargs) + return decorated + return actual_decorator + +def oauth_url(client_id, permissions=None, guild=None, redirect_uri=None): + """A helper function that returns the OAuth2 URL for inviting the bot + into guilds. + + Parameters + ----------- + client_id: :class:`str` + The client ID for your bot. + permissions: :class:`~discord.Permissions` + The permissions you're requesting. If not given then you won't be requesting any + permissions. + guild: :class:`~discord.Guild` + The guild to pre-select in the authorization screen, if available. 
+ redirect_uri: :class:`str` + An optional valid redirect URI. + """ + url = 'https://discordapp.com/oauth2/authorize?client_id={}&scope=bot'.format(client_id) + if permissions is not None: + url = url + '&permissions=' + str(permissions.value) + if guild is not None: + url = url + "&guild_id=" + str(guild.id) + if redirect_uri is not None: + from urllib.parse import urlencode + url = url + "&response_type=code&" + urlencode({'redirect_uri': redirect_uri}) + return url + + +def snowflake_time(id): + """Returns the creation date in UTC of a Discord snowflake ID.""" + return datetime.datetime.utcfromtimestamp(((id >> 22) + DISCORD_EPOCH) / 1000) + +def time_snowflake(datetime_obj, high=False): + """Returns a numeric snowflake pretending to be created at the given date. + + When using as the lower end of a range, use ``time_snowflake(high=False) - 1`` to be inclusive, ``high=True`` to be exclusive + When using as the higher end of a range, use ``time_snowflake(high=True)`` + 1 to be inclusive, ``high=False`` to be exclusive + + Parameters + ----------- + datetime_obj: :class:`datetime.datetime` + A timezone-naive datetime object representing UTC time. + high: :class:`bool` + Whether or not to set the lower 22 bit to high or low. + """ + unix_seconds = (datetime_obj - type(datetime_obj)(1970, 1, 1)).total_seconds() + discord_millis = int(unix_seconds * 1000 - DISCORD_EPOCH) + + return (discord_millis << 22) + (2**22-1 if high else 0) + +def find(predicate, seq): + """A helper to return the first element found in the sequence + that meets the predicate. For example: :: + + member = find(lambda m: m.name == 'Mighty', channel.guild.members) + + would find the first :class:`~discord.Member` whose name is 'Mighty' and return it. + If an entry is not found, then ``None`` is returned. + + This is different from :func:`py:filter` due to the fact it stops the moment it finds + a valid entry. + + Parameters + ----------- + predicate + A function that returns a boolean-like result. + seq: iterable + The iterable to search through. + """ + + for element in seq: + if predicate(element): + return element + return None + +def get(iterable, **attrs): + r"""A helper that returns the first element in the iterable that meets + all the traits passed in ``attrs``. This is an alternative for + :func:`~discord.utils.find`. + + When multiple attributes are specified, they are checked using + logical AND, not logical OR. Meaning they have to meet every + attribute passed in and not one of them. + + To have a nested attribute search (i.e. search by ``x.y``) then + pass in ``x__y`` as the keyword argument. + + If nothing is found that matches the attributes passed, then + ``None`` is returned. + + Examples + --------- + + Basic usage: + + .. code-block:: python3 + + member = discord.utils.get(message.guild.members, name='Foo') + + Multiple attribute matching: + + .. code-block:: python3 + + channel = discord.utils.get(guild.voice_channels, name='Foo', bitrate=64000) + + Nested attribute matching: + + .. code-block:: python3 + + channel = discord.utils.get(client.get_all_channels(), guild__name='Cool', name='general') + + Parameters + ----------- + iterable + An iterable to search through. + \*\*attrs + Keyword arguments that denote attributes to search with. 
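# Illustrative sketch (not part of this patch): two of the helpers defined
# above. The client ID and snowflake below are placeholders, not real values.
import discord

invite_url = discord.utils.oauth_url('00000000000000000',
                                     permissions=discord.Permissions(8))
created_at = discord.utils.snowflake_time(175928847299117063)  # naive UTC datetime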
+ """ + + # global -> local + _all = all + attrget = attrgetter + + # Special case the single element call + if len(attrs) == 1: + k, v = attrs.popitem() + pred = attrget(k.replace('__', '.')) + for elem in iterable: + if pred(elem) == v: + return elem + return None + + converted = [ + (attrget(attr.replace('__', '.')), value) + for attr, value in attrs.items() + ] + + for elem in iterable: + if _all(pred(elem) == value for pred, value in converted): + return elem + return None + +def _unique(iterable): + seen = set() + adder = seen.add + return [x for x in iterable if not (x in seen or adder(x))] + +def _get_as_snowflake(data, key): + try: + value = data[key] + except KeyError: + return None + else: + return value and int(value) + +def _get_mime_type_for_image(data): + if data.startswith(b'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A'): + return 'image/png' + elif data[6:10] in (b'JFIF', b'Exif'): + return 'image/jpeg' + elif data.startswith((b'\x47\x49\x46\x38\x37\x61', b'\x47\x49\x46\x38\x39\x61')): + return 'image/gif' + elif data.startswith(b'RIFF') and data[8:12] == b'WEBP': + return 'image/webp' + else: + raise InvalidArgument('Unsupported image type given') + +def _bytes_to_base64_data(data): + fmt = 'data:{mime};base64,{data}' + mime = _get_mime_type_for_image(data) + b64 = b64encode(data).decode('ascii') + return fmt.format(mime=mime, data=b64) + +def to_json(obj): + return json.dumps(obj, separators=(',', ':'), ensure_ascii=True) + +def _parse_ratelimit_header(request): + now = parsedate_to_datetime(request.headers['Date']) + reset = datetime.datetime.fromtimestamp(int(request.headers['X-Ratelimit-Reset']), datetime.timezone.utc) + return (reset - now).total_seconds() + +async def maybe_coroutine(f, *args, **kwargs): + value = f(*args, **kwargs) + if _isawaitable(value): + return await value + else: + return value + +async def async_all(gen, *, check=_isawaitable): + for elem in gen: + if check(elem): + elem = await elem + if not elem: + return False + return True + +async def sane_wait_for(futures, *, timeout, loop): + _, pending = await asyncio.wait(futures, timeout=timeout, loop=loop) + + if len(pending) != 0: + raise asyncio.TimeoutError() + +def valid_icon_size(size): + """Icons must be power of 2 within [16, 4096].""" + return not size & (size - 1) and size in range(16, 4097) + +class SnowflakeList(array.array): + """Internal data storage class to efficiently store a list of snowflakes. 
+
+    This should have the following characteristics:
+
+    - Low memory usage
+    - O(n) iteration (obviously)
+    - O(n log n) initial creation if data is unsorted
+    - O(log n) search and indexing
+    - O(n) insertion
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, data, *, is_sorted=False):
+        return array.array.__new__(cls, 'Q', data if is_sorted else sorted(data))
+
+    def add(self, element):
+        i = bisect_left(self, element)
+        self.insert(i, element)
+
+    def get(self, element):
+        i = bisect_left(self, element)
+        return self[i] if i != len(self) and self[i] == element else None
+
+    def has(self, element):
+        i = bisect_left(self, element)
+        return i != len(self) and self[i] == element
+
+_IS_ASCII = re.compile(r'^[\x00-\x7f]+$')
+
+def _string_width(string, *, _IS_ASCII=_IS_ASCII):
+    """Returns string's width."""
+    match = _IS_ASCII.match(string)
+    if match:
+        return match.endpos
+
+    UNICODE_WIDE_CHAR_TYPE = 'WFA'
+    width = 0
+    func = unicodedata.east_asian_width
+    for char in string:
+        width += 2 if func(char) in UNICODE_WIDE_CHAR_TYPE else 1
+    return width
+
+def resolve_invite(invite):
+    """
+    Resolves an invite from a :class:`~discord.Invite`, URL or ID
+
+    Parameters
+    -----------
+    invite: Union[:class:`~discord.Invite`, :class:`~discord.Object`, :class:`str`]
+        The invite.
+
+    Returns
+    --------
+    :class:`str`
+        The invite code.
+    """
+    from .invite import Invite # circular import
+    if isinstance(invite, Invite) or isinstance(invite, Object):
+        return invite.id
+    else:
+        rx = r'(?:https?\:\/\/)?discord(?:\.gg|app\.com\/invite)\/(.+)'
+        m = re.match(rx, invite)
+        if m:
+            return m.group(1)
+    return invite
+
+_MARKDOWN_ESCAPE_SUBREGEX = '|'.join(r'\{0}(?=([\s\S]*((?<!\{0})\{0})))'.format(c)
+                                     for c in ('*', '`', '_', '~', '|'))
+
+_MARKDOWN_ESCAPE_REGEX = re.compile(r'(?P<markdown>%s)' % _MARKDOWN_ESCAPE_SUBREGEX)
+
+def escape_markdown(text, *, as_needed=False, ignore_links=True):
+    r"""A helper function that escapes Discord's markdown.
+
+    Parameters
+    -----------
+    text: :class:`str`
+        The text to escape markdown from.
+    as_needed: :class:`bool`
+        Whether to escape the markdown characters as needed. This
+        means that it does not escape extraneous characters if it's
+        not necessary, e.g. ``**hello**`` is escaped into ``\*\*hello**``
+        instead of ``\*\*hello\*\*``. Note however that this can open
+        you up to some clever syntax abuse. Defaults to ``False``.
+    ignore_links: :class:`bool`
+        Whether to leave links alone when escaping markdown. For example,
+        if a URL in the text contains characters such as ``_`` then it will
+        be left alone. This option is not supported with ``as_needed``.
+        Defaults to ``True``.
+
+    Returns
+    --------
+    :class:`str`
+        The text with the markdown special characters escaped with a slash.
+    """
+
+    if not as_needed:
+        url_regex = r'(?P<url>(?:https?|steam)://(?:-\.)?(?:[^\s/?\.#-]+\.?)+(?:/[^\s]*)?)'
+        def replacement(match):
+            groupdict = match.groupdict()
+            is_url = groupdict.get('url')
+            if is_url:
+                return is_url
+            return '\\' + groupdict['markdown']
+
+        regex = r'(?P<markdown>[_\\~|\*`])'
+        if ignore_links:
+            regex = '(?:%s|%s)' % (url_regex, regex)
+        return re.sub(regex, replacement, text)
+    else:
+        text = re.sub(r'\\', r'\\\\', text)
+        return _MARKDOWN_ESCAPE_REGEX.sub(r'\\\1', text)
+
+def escape_mentions(text):
+    """A helper function that escapes everyone, here, role, and user mentions.
+
+    .. note::
+
+        This does not include channel mentions.
+
+    Parameters
+    -----------
+    text: :class:`str`
+        The text to escape mentions from.
+
+    Returns
+    --------
+    :class:`str`
+        The text with the mentions removed.
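# Illustrative sketch (not part of this patch): combining the two escape
# helpers above before echoing untrusted text back into chat.
import discord

def sanitise(text):
    # Escape *, _, ~, |, ` first, then neutralise @everyone/@here/user/role pings.
    return discord.utils.escape_mentions(discord.utils.escape_markdown(text))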
+ """ + return re.sub(r'@(everyone|here|[!&]?[0-9]{17,21})', '@\u200b\\1', text) diff --git a/venv/lib/python3.7/site-packages/discord/voice_client.py b/venv/lib/python3.7/site-packages/discord/voice_client.py new file mode 100644 index 0000000..b014de5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/voice_client.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +"""Some documentation to refer to: + +- Our main web socket (mWS) sends opcode 4 with a guild ID and channel ID. +- The mWS receives VOICE_STATE_UPDATE and VOICE_SERVER_UPDATE. +- We pull the session_id from VOICE_STATE_UPDATE. +- We pull the token, endpoint and server_id from VOICE_SERVER_UPDATE. +- Then we initiate the voice web socket (vWS) pointing to the endpoint. +- We send opcode 0 with the user_id, server_id, session_id and token using the vWS. +- The vWS sends back opcode 2 with an ssrc, port, modes(array) and hearbeat_interval. +- We send a UDP discovery packet to endpoint:port and receive our IP and our port in LE. +- Then we send our IP and port via vWS with opcode 1. +- When that's all done, we receive opcode 4 from the vWS. +- Finally we can transmit data to endpoint:port. +""" + +import asyncio +import socket +import logging +import struct +import threading + +from . import opus +from .backoff import ExponentialBackoff +from .gateway import * +from .errors import ClientException, ConnectionClosed +from .player import AudioPlayer, AudioSource + +try: + import nacl.secret + has_nacl = True +except ImportError: + has_nacl = False + + +log = logging.getLogger(__name__) + +class VoiceClient: + """Represents a Discord voice connection. + + You do not create these, you typically get them from + e.g. :meth:`VoiceChannel.connect`. + + Warning + -------- + In order to play audio, you must have loaded the opus library + through :func:`opus.load_opus`. + + If you don't do this then the library will not be able to + transmit audio. + + Attributes + ----------- + session_id: :class:`str` + The voice connection session ID. + token: :class:`str` + The voice connection token. + endpoint: :class:`str` + The endpoint we are connecting to. + channel: :class:`abc.Connectable` + The voice channel connected to. + loop: :class:`asyncio.AbstractEventLoop` + The event loop that the voice client is running on. 
+ """ + def __init__(self, state, timeout, channel): + if not has_nacl: + raise RuntimeError("PyNaCl library needed in order to use voice") + + self.channel = channel + self.main_ws = None + self.timeout = timeout + self.ws = None + self.socket = None + self.loop = state.loop + self._state = state + # this will be used in the AudioPlayer thread + self._connected = threading.Event() + + self._handshaking = False + self._handshake_check = asyncio.Lock(loop=self.loop) + self._handshake_complete = asyncio.Event(loop=self.loop) + + self.mode = None + self._connections = 0 + self.sequence = 0 + self.timestamp = 0 + self._runner = None + self._player = None + self.encoder = opus.Encoder() + + warn_nacl = not has_nacl + supported_modes = ( + 'xsalsa20_poly1305_suffix', + 'xsalsa20_poly1305', + ) + + @property + def guild(self): + """Optional[:class:`Guild`]: The guild we're connected to, if applicable.""" + return getattr(self.channel, 'guild', None) + + @property + def user(self): + """:class:`ClientUser`: The user connected to voice (i.e. ourselves).""" + return self._state.user + + def checked_add(self, attr, value, limit): + val = getattr(self, attr) + if val + value > limit: + setattr(self, attr, 0) + else: + setattr(self, attr, val + value) + + # connection related + + async def start_handshake(self): + log.info('Starting voice handshake...') + + guild_id, channel_id = self.channel._get_voice_state_pair() + state = self._state + self.main_ws = ws = state._get_websocket(guild_id) + self._connections += 1 + + # request joining + await ws.voice_state(guild_id, channel_id) + + try: + await asyncio.wait_for(self._handshake_complete.wait(), timeout=self.timeout, loop=self.loop) + except asyncio.TimeoutError: + await self.terminate_handshake(remove=True) + raise + + log.info('Voice handshake complete. Endpoint found %s (IP: %s)', self.endpoint, self.endpoint_ip) + + async def terminate_handshake(self, *, remove=False): + guild_id, channel_id = self.channel._get_voice_state_pair() + self._handshake_complete.clear() + await self.main_ws.voice_state(guild_id, None, self_mute=True) + + log.info('The voice handshake is being terminated for Channel ID %s (Guild ID %s)', channel_id, guild_id) + if remove: + log.info('The voice client has been removed for Channel ID %s (Guild ID %s)', channel_id, guild_id) + key_id, _ = self.channel._get_voice_client_key() + self._state._remove_voice_client(key_id) + + async def _create_socket(self, server_id, data): + async with self._handshake_check: + if self._handshaking: + log.info("Ignoring voice server update while handshake is in progress") + return + self._handshaking = True + + self._connected.clear() + self.session_id = self.main_ws.session_id + self.server_id = server_id + self.token = data.get('token') + endpoint = data.get('endpoint') + + if endpoint is None or self.token is None: + log.warning('Awaiting endpoint... This requires waiting. ' \ + 'If timeout occurred considering raising the timeout and reconnecting.') + return + + self.endpoint = endpoint.replace(':80', '') + self.endpoint_ip = socket.gethostbyname(self.endpoint) + + if self.socket: + try: + self.socket.close() + except Exception: + pass + + self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.socket.setblocking(False) + + if self._handshake_complete.is_set(): + # terminate the websocket and handle the reconnect loop if necessary. 
+ self._handshake_complete.clear() + await self.ws.close(4000) + return + + self._handshake_complete.set() + + async def connect(self, *, reconnect=True, _tries=0, do_handshake=True): + log.info('Connecting to voice...') + try: + del self.secret_key + except AttributeError: + pass + + if do_handshake: + await self.start_handshake() + + try: + self.ws = await DiscordVoiceWebSocket.from_client(self) + self._handshaking = False + self._connected.clear() + while not hasattr(self, 'secret_key'): + await self.ws.poll_event() + self._connected.set() + except (ConnectionClosed, asyncio.TimeoutError): + if reconnect and _tries < 5: + log.exception('Failed to connect to voice... Retrying...') + await asyncio.sleep(1 + _tries * 2.0, loop=self.loop) + await self.terminate_handshake() + await self.connect(reconnect=reconnect, _tries=_tries + 1) + else: + raise + + if self._runner is None: + self._runner = self.loop.create_task(self.poll_voice_ws(reconnect)) + + async def poll_voice_ws(self, reconnect): + backoff = ExponentialBackoff() + while True: + try: + await self.ws.poll_event() + except (ConnectionClosed, asyncio.TimeoutError) as exc: + if isinstance(exc, ConnectionClosed): + # The following close codes are undocumented so I will document them here. + # 1000 - normal closure (obviously) + # 4014 - voice channel has been deleted. + # 4015 - voice server has crashed + if exc.code in (1000, 4014, 4015): + log.info('Disconnecting from voice normally, close code %d.', exc.code) + await self.disconnect() + break + + if not reconnect: + await self.disconnect() + raise + + retry = backoff.delay() + log.exception('Disconnected from voice... Reconnecting in %.2fs.', retry) + self._connected.clear() + await asyncio.sleep(retry, loop=self.loop) + await self.terminate_handshake() + try: + await self.connect(reconnect=True) + except asyncio.TimeoutError: + # at this point we've retried 5 times... let's continue the loop. + log.warning('Could not connect to voice... Retrying...') + continue + + async def disconnect(self, *, force=False): + """|coro| + + Disconnects this voice client from voice. + """ + if not force and not self.is_connected(): + return + + self.stop() + self._connected.clear() + + try: + if self.ws: + await self.ws.close() + + await self.terminate_handshake(remove=True) + finally: + if self.socket: + self.socket.close() + + async def move_to(self, channel): + """|coro| + + Moves you to a different voice channel. + + Parameters + ----------- + channel: :class:`abc.Snowflake` + The channel to move to. Must be a voice channel. 
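# Illustrative sketch (not part of this patch): moving and tearing down an
# existing connection with the coroutines above. `vc` is assumed to be a
# connected VoiceClient.
async def follow(vc, new_channel):
    if vc.is_connected():
        await vc.move_to(new_channel)

async def hang_up(vc):
    await vc.disconnect()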
+ """ + guild_id, _ = self.channel._get_voice_state_pair() + await self.main_ws.voice_state(guild_id, channel.id) + + def is_connected(self): + """Indicates if the voice client is connected to voice.""" + return self._connected.is_set() + + # audio related + + def _get_voice_packet(self, data): + header = bytearray(12) + + # Formulate rtp header + header[0] = 0x80 + header[1] = 0x78 + struct.pack_into('>H', header, 2, self.sequence) + struct.pack_into('>I', header, 4, self.timestamp) + struct.pack_into('>I', header, 8, self.ssrc) + + encrypt_packet = getattr(self, '_encrypt_' + self.mode) + return encrypt_packet(header, data) + + def _encrypt_xsalsa20_poly1305(self, header, data): + box = nacl.secret.SecretBox(bytes(self.secret_key)) + nonce = bytearray(24) + nonce[:12] = header + + return header + box.encrypt(bytes(data), bytes(nonce)).ciphertext + + def _encrypt_xsalsa20_poly1305_suffix(self, header, data): + box = nacl.secret.SecretBox(bytes(self.secret_key)) + nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) + + return header + box.encrypt(bytes(data), nonce).ciphertext + nonce + + def play(self, source, *, after=None): + """Plays an :class:`AudioSource`. + + The finalizer, ``after`` is called after the source has been exhausted + or an error occurred. + + If an error happens while the audio player is running, the exception is + caught and the audio player is then stopped. + + Parameters + ----------- + source: :class:`AudioSource` + The audio source we're reading from. + after: Callable[[:class:`Exception`], Any] + The finalizer that is called after the stream is exhausted. + All exceptions it throws are silently discarded. This function + must have a single parameter, ``error``, that denotes an + optional exception that was raised during playing. + + Raises + ------- + ClientException + Already playing audio or not connected. + TypeError + source is not a :class:`AudioSource` or after is not a callable. + """ + + if not self.is_connected(): + raise ClientException('Not connected to voice.') + + if self.is_playing(): + raise ClientException('Already playing audio.') + + if not isinstance(source, AudioSource): + raise TypeError('source must an AudioSource not {0.__class__.__name__}'.format(source)) + + self._player = AudioPlayer(source, self, after=after) + self._player.start() + + def is_playing(self): + """Indicates if we're currently playing audio.""" + return self._player is not None and self._player.is_playing() + + def is_paused(self): + """Indicates if we're playing audio, but if we're paused.""" + return self._player is not None and self._player.is_paused() + + def stop(self): + """Stops playing audio.""" + if self._player: + self._player.stop() + self._player = None + + def pause(self): + """Pauses the audio playing.""" + if self._player: + self._player.pause() + + def resume(self): + """Resumes the audio playing.""" + if self._player: + self._player.resume() + + @property + def source(self): + """Optional[:class:`AudioSource`]: The audio source being played, if playing. + + This property can also be used to change the audio source currently being played. 
+ """ + return self._player.source if self._player else None + + @source.setter + def source(self, value): + if not isinstance(value, AudioSource): + raise TypeError('expected AudioSource not {0.__class__.__name__}.'.format(value)) + + if self._player is None: + raise ValueError('Not playing anything.') + + self._player._set_source(value) + + def send_audio_packet(self, data, *, encode=True): + """Sends an audio packet composed of the data. + + You must be connected to play audio. + + Parameters + ---------- + data: :class:`bytes` + The :term:`py:bytes-like object` denoting PCM or Opus voice data. + encode: :class:`bool` + Indicates if ``data`` should be encoded into Opus. + + Raises + ------- + ClientException + You are not connected. + opus.OpusError + Encoding the data failed. + """ + + self.checked_add('sequence', 1, 65535) + if encode: + encoded_data = self.encoder.encode(data, self.encoder.SAMPLES_PER_FRAME) + else: + encoded_data = data + packet = self._get_voice_packet(encoded_data) + try: + self.socket.sendto(packet, (self.endpoint_ip, self.voice_port)) + except BlockingIOError: + log.warning('A packet has been dropped (seq: %s, timestamp: %s)', self.sequence, self.timestamp) + + self.checked_add('timestamp', self.encoder.SAMPLES_PER_FRAME, 4294967295) diff --git a/venv/lib/python3.7/site-packages/discord/webhook.py b/venv/lib/python3.7/site-packages/discord/webhook.py new file mode 100644 index 0000000..e06b628 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/webhook.py @@ -0,0 +1,748 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import json +import time +import re + +import aiohttp + +from . import utils +from .errors import InvalidArgument, HTTPException, Forbidden, NotFound +from .user import BaseUser, User +from .asset import Asset + +__all__ = ( + 'WebhookAdapter', + 'AsyncWebhookAdapter', + 'RequestsWebhookAdapter', + 'Webhook', +) + +class WebhookAdapter: + """Base class for all webhook adapters. + + Attributes + ------------ + webhook: :class:`Webhook` + The webhook that owns this adapter. + """ + + BASE = 'https://discordapp.com/api/v7' + + def _prepare(self, webhook): + self._webhook_id = webhook.id + self._webhook_token = webhook.token + self._request_url = '{0.BASE}/webhooks/{1}/{2}'.format(self, webhook.id, webhook.token) + self.webhook = webhook + + def request(self, verb, url, payload=None, multipart=None): + """Actually does the request. 
+ + Subclasses must implement this. + + Parameters + ----------- + verb: :class:`str` + The HTTP verb to use for the request. + url: :class:`str` + The URL to send the request to. This will have + the query parameters already added to it, if any. + multipart: Optional[:class:`dict`] + A dict containing multipart form data to send with + the request. If a filename is being uploaded, then it will + be under a ``file`` key which will have a 3-element :class:`tuple` + denoting ``(filename, file, content_type)``. + payload: Optional[:class:`dict`] + The JSON to send with the request, if any. + """ + raise NotImplementedError() + + def delete_webhook(self): + return self.request('DELETE', self._request_url) + + def edit_webhook(self, **payload): + return self.request('PATCH', self._request_url, payload=payload) + + def handle_execution_response(self, data, *, wait): + """Transforms the webhook execution response into something + more meaningful. + + This is mainly used to convert the data into a :class:`Message` + if necessary. + + Subclasses must implement this. + + Parameters + ------------ + data + The data that was returned from the request. + wait: :class:`bool` + Whether the webhook execution was asked to wait or not. + """ + raise NotImplementedError() + + async def _wrap_coroutine_and_cleanup(self, coro, cleanup): + try: + return await coro + finally: + cleanup() + + def execute_webhook(self, *, payload, wait=False, file=None, files=None): + cleanup = None + if file is not None: + multipart = { + 'file': (file.filename, file.fp, 'application/octet-stream'), + 'payload_json': utils.to_json(payload) + } + data = None + cleanup = file.close + files_to_pass = [file] + elif files is not None: + multipart = { + 'payload_json': utils.to_json(payload) + } + for i, file in enumerate(files, start=1): + multipart['file%i' % i] = (file.filename, file.fp, 'application/octet-stream') + data = None + + def _anon(): + for f in files: + f.close() + + cleanup = _anon + files_to_pass = files + else: + data = payload + multipart = None + files_to_pass = None + + url = '%s?wait=%d' % (self._request_url, wait) + maybe_coro = None + try: + maybe_coro = self.request('POST', url, multipart=multipart, payload=data, files=files_to_pass) + finally: + if maybe_coro is not None and cleanup is not None: + if not asyncio.iscoroutine(maybe_coro): + cleanup() + else: + maybe_coro = self._wrap_coroutine_and_cleanup(maybe_coro, cleanup) + + # if request raises up there then this should never be `None` + return self.handle_execution_response(maybe_coro, wait=wait) + +class AsyncWebhookAdapter(WebhookAdapter): + """A webhook adapter suited for use with aiohttp. + + .. note:: + + You are responsible for cleaning up the client session. + + Parameters + ----------- + session: :class:`aiohttp.ClientSession` + The session to use to send requests. 
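+
+    A minimal usage sketch (the ID and token are placeholders; closing the
+    session is left to the caller, per the note above)::
+
+        session = aiohttp.ClientSession()
+        webhook = Webhook.partial(123456, 'abcdefg', adapter=AsyncWebhookAdapter(session))
+        await webhook.send('Hello World')
+        await session.close()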
+ """ + + def __init__(self, session): + self.session = session + self.loop = asyncio.get_event_loop() + + async def request(self, verb, url, payload=None, multipart=None, *, files=None): + headers = {} + data = None + files = files or [] + if payload: + headers['Content-Type'] = 'application/json' + data = utils.to_json(payload) + + if multipart: + data = aiohttp.FormData() + for key, value in multipart.items(): + if key.startswith('file'): + data.add_field(key, value[1], filename=value[0], content_type=value[2]) + else: + data.add_field(key, value) + + for tries in range(5): + for file in files: + file.reset(seek=tries) + + async with self.session.request(verb, url, headers=headers, data=data) as r: + response = await r.text(encoding='utf-8') + if r.headers['Content-Type'] == 'application/json': + response = json.loads(response) + + # check if we have rate limit header information + remaining = r.headers.get('X-Ratelimit-Remaining') + if remaining == '0' and r.status != 429: + delta = utils._parse_ratelimit_header(r) + await asyncio.sleep(delta, loop=self.loop) + + if 300 > r.status >= 200: + return response + + # we are being rate limited + if r.status == 429: + retry_after = response['retry_after'] / 1000.0 + await asyncio.sleep(retry_after, loop=self.loop) + continue + + if r.status in (500, 502): + await asyncio.sleep(1 + tries * 2, loop=self.loop) + continue + + if r.status == 403: + raise Forbidden(r, response) + elif r.status == 404: + raise NotFound(r, response) + else: + raise HTTPException(r, response) + + async def handle_execution_response(self, response, *, wait): + data = await response + if not wait: + return data + + # transform into Message object + from .message import Message + return Message(data=data, state=self.webhook._state, channel=self.webhook.channel) + +class RequestsWebhookAdapter(WebhookAdapter): + """A webhook adapter suited for use with ``requests``. + + Only versions of :doc:`req:index` higher than 2.13.0 are supported. + + Parameters + ----------- + session: Optional[`requests.Session `_] + The requests session to use for sending requests. If not given then + each request will create a new session. Note if a session is given, + the webhook adapter **will not** clean it up for you. You must close + the session yourself. + sleep: :class:`bool` + Whether to sleep the thread when encountering a 429 or pre-emptive + rate limit or a 5xx status code. Defaults to ``True``. If set to + ``False`` then this will raise an :exc:`HTTPException` instead. 
+ """ + + def __init__(self, session=None, *, sleep=True): + import requests + self.session = session or requests + self.sleep = sleep + + def request(self, verb, url, payload=None, multipart=None, *, files=None): + headers = {} + data = None + files = files or [] + if payload: + headers['Content-Type'] = 'application/json' + data = utils.to_json(payload) + + if multipart is not None: + data = {'payload_json': multipart.pop('payload_json')} + + for tries in range(5): + for file in files: + file.reset(seek=tries) + + r = self.session.request(verb, url, headers=headers, data=data, files=multipart) + r.encoding = 'utf-8' + response = r.text + + # compatibility with aiohttp + r.status = r.status_code + + if r.headers['Content-Type'] == 'application/json': + response = json.loads(response) + + # check if we have rate limit header information + remaining = r.headers.get('X-Ratelimit-Remaining') + if remaining == '0' and r.status != 429 and self.sleep: + delta = utils._parse_ratelimit_header(r) + time.sleep(delta) + + if 300 > r.status >= 200: + return response + + # we are being rate limited + if r.status == 429: + if self.sleep: + retry_after = response['retry_after'] / 1000.0 + time.sleep(retry_after) + continue + else: + raise HTTPException(r, response) + + if self.sleep and r.status in (500, 502): + time.sleep(1 + tries * 2) + continue + + if r.status == 403: + raise Forbidden(r, response) + elif r.status == 404: + raise NotFound(r, response) + else: + raise HTTPException(r, response) + + def handle_execution_response(self, response, *, wait): + if not wait: + return response + + # transform into Message object + from .message import Message + return Message(data=response, state=self.webhook._state, channel=self.webhook.channel) + +class _FriendlyHttpAttributeErrorHelper: + __slots__ = () + + def __getattr__(self, attr): + raise AttributeError('PartialWebhookState does not support http methods.') + +class _PartialWebhookState: + __slots__ = ('loop',) + + def __init__(self, adapter): + # Fetch the loop from the adapter if it's there + try: + self.loop = adapter.loop + except AttributeError: + self.loop = None + + def _get_guild(self, guild_id): + return None + + def store_user(self, data): + return BaseUser(state=self, data=data) + + @property + def is_bot(self): + return True + + @property + def http(self): + # Some data classes assign state.http and that should be kosher + # however, using it should result in a late-binding error. + return _FriendlyHttpAttributeErrorHelper() + + def __getattr__(self, attr): + raise AttributeError('PartialWebhookState does not support {0:!r}.'.format(attr)) + +class Webhook: + """Represents a Discord webhook. + + Webhooks are a form to send messages to channels in Discord without a + bot user or authentication. + + There are two main ways to use Webhooks. The first is through the ones + received by the library such as :meth:`.Guild.webhooks` and + :meth:`.TextChannel.webhooks`. The ones received by the library will + automatically have an adapter bound using the library's HTTP session. + Those webhooks will have :meth:`~.Webhook.send`, :meth:`~.Webhook.delete` and + :meth:`~.Webhook.edit` as coroutines. + + The second form involves creating a webhook object manually without having + it bound to a websocket connection using the :meth:`~.Webhook.from_url` or + :meth:`~.Webhook.partial` classmethods. This form allows finer grained control + over how requests are done, allowing you to mix async and sync code using either + :doc:`aiohttp ` or :doc:`req:index`. 
+
+    For example, creating a webhook from a URL and using :doc:`aiohttp <aio:index>`:
+
+    .. code-block:: python3
+
+        from discord import Webhook, AsyncWebhookAdapter
+        import aiohttp
+
+        async def foo():
+            async with aiohttp.ClientSession() as session:
+                webhook = Webhook.from_url('url-here', adapter=AsyncWebhookAdapter(session))
+                await webhook.send('Hello World', username='Foo')
+
+    Or creating a webhook from an ID and token and using :doc:`req:index`:
+
+    .. code-block:: python3
+
+        import requests
+        from discord import Webhook, RequestsWebhookAdapter
+
+        webhook = Webhook.partial(123456, 'abcdefg', adapter=RequestsWebhookAdapter())
+        webhook.send('Hello World', username='Foo')
+
+    Attributes
+    ------------
+    id: :class:`int`
+        The webhook's ID
+    token: Optional[:class:`str`]
+        The authentication token of the webhook. If this is ``None``
+        then the webhook cannot be used to make requests.
+    guild_id: Optional[:class:`int`]
+        The guild ID this webhook is for.
+    channel_id: Optional[:class:`int`]
+        The channel ID this webhook is for.
+    user: Optional[:class:`abc.User`]
+        The user this webhook was created by. If the webhook was
+        received without authentication then this will be ``None``.
+    name: Optional[:class:`str`]
+        The default name of the webhook.
+    avatar: Optional[:class:`str`]
+        The default avatar of the webhook.
+    """
+
+    __slots__ = ('id', 'guild_id', 'channel_id', 'user', 'name', 'avatar',
+                 'token', '_state', '_adapter')
+
+    def __init__(self, data, *, adapter, state=None):
+        self.id = int(data['id'])
+        self.channel_id = utils._get_as_snowflake(data, 'channel_id')
+        self.guild_id = utils._get_as_snowflake(data, 'guild_id')
+        self.name = data.get('name')
+        self.avatar = data.get('avatar')
+        self.token = data.get('token')
+        self._state = state or _PartialWebhookState(adapter)
+        self._adapter = adapter
+        self._adapter._prepare(self)
+
+        user = data.get('user')
+        if user is None:
+            self.user = None
+        elif state is None:
+            self.user = BaseUser(state=None, data=user)
+        else:
+            self.user = User(state=state, data=user)
+
+    def __repr__(self):
+        return '<Webhook id=%r>' % self.id
+
+    @property
+    def url(self):
+        """Returns the webhook's url."""
+        return 'https://discordapp.com/api/webhooks/{}/{}'.format(self.id, self.token)
+
+    @classmethod
+    def partial(cls, id, token, *, adapter):
+        """Creates a partial :class:`Webhook`.
+
+        A partial webhook is just a webhook object with an ID and a token.
+
+        Parameters
+        -----------
+        id: :class:`int`
+            The ID of the webhook.
+        token: :class:`str`
+            The authentication token of the webhook.
+        adapter: :class:`WebhookAdapter`
+            The webhook adapter to use when sending requests. This is
+            typically :class:`AsyncWebhookAdapter` for :doc:`aiohttp <aio:index>` or
+            :class:`RequestsWebhookAdapter` for :doc:`req:index`.
+        """
+
+        if not isinstance(adapter, WebhookAdapter):
+            raise TypeError('adapter must be a subclass of WebhookAdapter')
+
+        data = {
+            'id': id,
+            'token': token
+        }
+
+        return cls(data, adapter=adapter)
+
+    @classmethod
+    def from_url(cls, url, *, adapter):
+        """Creates a partial :class:`Webhook` from a webhook URL.
+
+        Parameters
+        ------------
+        url: :class:`str`
+            The URL of the webhook.
+        adapter: :class:`WebhookAdapter`
+            The webhook adapter to use when sending requests. This is
+            typically :class:`AsyncWebhookAdapter` for :doc:`aiohttp <aio:index>` or
+            :class:`RequestsWebhookAdapter` for :doc:`req:index`.
+
+        Raises
+        -------
+        InvalidArgument
+            The URL is invalid.
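+
+        Example (an illustrative sketch; ``'url-here'`` is a placeholder for a
+        real webhook URL)::
+
+            webhook = Webhook.from_url('url-here', adapter=RequestsWebhookAdapter())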
+ """ + + m = re.search(r'discordapp.com/api/webhooks/(?P[0-9]{17,21})/(?P[A-Za-z0-9\.\-\_]{60,68})', url) + if m is None: + raise InvalidArgument('Invalid webhook URL given.') + return cls(m.groupdict(), adapter=adapter) + + @classmethod + def from_state(cls, data, state): + session = state.http._HTTPClient__session + return cls(data, adapter=AsyncWebhookAdapter(session=session), state=state) + + @property + def guild(self): + """Optional[:class:`Guild`]: The guild this webhook belongs to. + + If this is a partial webhook, then this will always return ``None``. + """ + return self._state._get_guild(self.guild_id) + + @property + def channel(self): + """Optional[:class:`TextChannel`]: The text channel this webhook belongs to. + + If this is a partial webhook, then this will always return ``None``. + """ + guild = self.guild + return guild and guild.get_channel(self.channel_id) + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the webhook's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @property + def avatar_url(self): + """Returns an :class:`Asset` for the avatar the webhook has. + + If the webhook does not have a traditional avatar, an asset for + the default avatar is returned instead. + + This is equivalent to calling :meth:`avatar_url_as` with the + default parameters. + """ + return self.avatar_url_as() + + def avatar_url_as(self, *, format=None, size=1024): + """Returns an :class:`Asset` for the avatar the webhook has. + + If the webhook does not have a traditional avatar, an asset for + the default avatar is returned instead. + + The format must be one of 'jpeg', 'jpg', or 'png'. + The size must be a power of 2 between 16 and 1024. + + Parameters + ----------- + format: Optional[:class:`str`] + The format to attempt to convert the avatar to. + If the format is ``None``, then it is equivalent to png. + size: :class:`int` + The size of the image to display. + + Raises + ------ + InvalidArgument + Bad image format passed to ``format`` or invalid ``size``. + + Returns + -------- + :class:`Asset` + The resulting CDN asset. + """ + if self.avatar is None: + # Default is always blurple apparently + return Asset(self._state, 'https://cdn.discordapp.com/embed/avatars/0.png') + + if not utils.valid_icon_size(size): + raise InvalidArgument("size must be a power of 2 between 16 and 1024") + + format = format or 'png' + + if format not in ('png', 'jpg', 'jpeg'): + raise InvalidArgument("format must be one of 'png', 'jpg', or 'jpeg'.") + + url = 'https://cdn.discordapp.com/avatars/{0.id}/{0.avatar}.{1}?size={2}'.format(self, format, size) + return Asset(self._state, url) + + def delete(self): + """|maybecoro| + + Deletes this Webhook. + + If the webhook is constructed with a :class:`RequestsWebhookAdapter` then this is + not a coroutine. + + Raises + ------- + HTTPException + Deleting the webhook failed. + NotFound + This webhook does not exist. + Forbidden + You do not have permissions to delete this webhook. + InvalidArgument + This webhook does not have a token associated with it. + """ + if self.token is None: + raise InvalidArgument('This webhook does not have a token associated with it') + + return self._adapter.delete_webhook() + + def edit(self, **kwargs): + """|maybecoro| + + Edits this Webhook. + + If the webhook is constructed with a :class:`RequestsWebhookAdapter` then this is + not a coroutine. + + Parameters + ------------- + name: Optional[:class:`str`] + The webhook's new default name. 
+ avatar: Optional[:class:`bytes`] + A :term:`py:bytes-like object` representing the webhook's new default avatar. + + Raises + ------- + HTTPException + Editing the webhook failed. + NotFound + This webhook does not exist. + InvalidArgument + This webhook does not have a token associated with it. + """ + if self.token is None: + raise InvalidArgument('This webhook does not have a token associated with it') + + payload = {} + + try: + name = kwargs['name'] + except KeyError: + pass + else: + if name is not None: + payload['name'] = str(name) + else: + payload['name'] = None + + try: + avatar = kwargs['avatar'] + except KeyError: + pass + else: + if avatar is not None: + payload['avatar'] = utils._bytes_to_base64_data(avatar) + else: + payload['avatar'] = None + + return self._adapter.edit_webhook(**payload) + + def send(self, content=None, *, wait=False, username=None, avatar_url=None, tts=False, + file=None, files=None, embed=None, embeds=None): + """|maybecoro| + + Sends a message using the webhook. + + If the webhook is constructed with a :class:`RequestsWebhookAdapter` then this is + not a coroutine. + + The content must be a type that can convert to a string through ``str(content)``. + + To upload a single file, the ``file`` parameter should be used with a + single :class:`File` object. + + If the ``embed`` parameter is provided, it must be of type :class:`Embed` and + it must be a rich embed type. You cannot mix the ``embed`` parameter with the + ``embeds`` parameter, which must be a :class:`list` of :class:`Embed` objects to send. + + Parameters + ------------ + content: :class:`str` + The content of the message to send. + wait: :class:`bool` + Whether the server should wait before sending a response. This essentially + means that the return type of this function changes from ``None`` to + a :class:`Message` if set to ``True``. + username: :class:`str` + The username to send with this message. If no username is provided + then the default username for the webhook is used. + avatar_url: Union[:class:`str`, :class:`Asset`] + The avatar URL to send with this message. If no avatar URL is provided + then the default avatar for the webhook is used. + tts: :class:`bool` + Indicates if the message should be sent using text-to-speech. + file: :class:`File` + The file to upload. This cannot be mixed with ``files`` parameter. + files: List[:class:`File`] + A list of files to send with the content. This cannot be mixed with the + ``file`` parameter. + embed: :class:`Embed` + The rich embed for the content to send. This cannot be mixed with + ``embeds`` parameter. + embeds: List[:class:`Embed`] + A list of embeds to send with the content. Maximum of 10. This cannot + be mixed with the ``embed`` parameter. + + Raises + -------- + HTTPException + Sending the message failed. + NotFound + This webhook was not found. + Forbidden + The authorization token for the webhook is incorrect. + InvalidArgument + You specified both ``embed`` and ``embeds`` or the length of + ``embeds`` was invalid or there was no token associated with + this webhook. + + Returns + --------- + Optional[:class:`Message`] + The message that was sent. 
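+
+        Example (an illustrative sketch assuming an :class:`AsyncWebhookAdapter`;
+        with ``wait=True`` the return value is the resulting :class:`Message`,
+        otherwise it is ``None``)::
+
+            message = await webhook.send('Hello World', username='Foo', wait=True)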
+ """ + + payload = {} + if self.token is None: + raise InvalidArgument('This webhook does not have a token associated with it') + if files is not None and file is not None: + raise InvalidArgument('Cannot mix file and files keyword arguments.') + if embeds is not None and embed is not None: + raise InvalidArgument('Cannot mix embed and embeds keyword arguments.') + + if embeds is not None: + if len(embeds) > 10: + raise InvalidArgument('embeds has a maximum of 10 elements.') + payload['embeds'] = [e.to_dict() for e in embeds] + + if embed is not None: + payload['embeds'] = [embed.to_dict()] + + if content is not None: + payload['content'] = str(content) + + payload['tts'] = tts + if avatar_url: + payload['avatar_url'] = str(avatar_url) + if username: + payload['username'] = username + + return self._adapter.execute_webhook(wait=wait, file=file, files=files, payload=payload) + + def execute(self, *args, **kwargs): + """An alias for :meth:`~.Webhook.send`.""" + return self.send(*args, **kwargs) diff --git a/venv/lib/python3.7/site-packages/discord/widget.py b/venv/lib/python3.7/site-packages/discord/widget.py new file mode 100644 index 0000000..988b0b0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/discord/widget.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2019 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .utils import snowflake_time, _get_as_snowflake, resolve_invite +from .user import BaseUser +from .activity import create_activity +from .invite import Invite +from .enums import Status, try_enum +from collections import namedtuple + +class WidgetChannel(namedtuple('WidgetChannel', 'id name position')): + """Represents a "partial" widget channel. + + .. container:: operations + + .. describe:: x == y + + Checks if two partial channels are the same. + + .. describe:: x != y + + Checks if two partial channels are not the same. + + .. describe:: hash(x) + + Return the partial channel's hash. + + .. describe:: str(x) + + Returns the partial channel's name. + + Attributes + ----------- + id: :class:`int` + The channel's ID. + name: :class:`str` + The channel's name. 
+ position: :class:`int` + The channel's position + """ + __slots__ = () + + def __str__(self): + return self.name + + @property + def mention(self): + """:class:`str`: The string that allows you to mention the channel.""" + return '<#%s>' % self.id + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" + return snowflake_time(self.id) + +class WidgetMember(BaseUser): + """Represents a "partial" member of the widget's guild. + + .. container:: operations + + .. describe:: x == y + + Checks if two widget members are the same. + + .. describe:: x != y + + Checks if two widget members are not the same. + + .. describe:: hash(x) + + Return the widget member's hash. + + .. describe:: str(x) + + Returns the widget member's `name#discriminator`. + + Attributes + ----------- + id: :class:`int` + The member's ID. + name: :class:`str` + The member's username. + discriminator: :class:`str` + The member's discriminator. + bot: :class:`bool` + Whether the member is a bot. + status: :class:`Status` + The member's status. + nick: Optional[:class:`str`] + The member's nickname. + avatar: Optional[:class:`str`] + The member's avatar hash. + activity: Optional[Union[:class:`Activity`, :class:`Game`, :class:`Streaming`, :class:`Spotify`]] + The member's activity. + deafened: Optional[:class:`bool`] + Whether the member is currently deafened. + muted: Optional[:class:`bool`] + Whether the member is currently muted. + suppress: Optional[:class:`bool`] + Whether the member is currently being suppressed. + connected_channel: Optional[:class:`VoiceChannel`] + Which channel the member is connected to. + """ + __slots__ = ('name', 'status', 'nick', 'avatar', 'discriminator', + 'id', 'bot', 'activity', 'deafened', 'suppress', 'muted', + 'connected_channel') + + def __init__(self, *, state, data, connected_channel=None): + super().__init__(state=state, data=data) + self.nick = data.get('nick') + self.status = try_enum(Status, data.get('status')) + self.deafened = data.get('deaf', False) or data.get('self_deaf', False) + self.muted = data.get('mute', False) or data.get('self_mute', False) + self.suppress = data.get('suppress', False) + + try: + game = data['game'] + except KeyError: + self.activity = None + else: + self.activity = create_activity(game) + + self.connected_channel = connected_channel + + @property + def display_name(self): + """:class:`str`: Returns the member's display name.""" + return self.nick if self.nick else self.name + +class Widget: + """Represents a :class:`Guild` widget. + + .. container:: operations + + .. describe:: x == y + + Checks if two widgets are the same. + + .. describe:: x != y + + Checks if two widgets are not the same. + + .. describe:: str(x) + + Returns the widget's JSON URL. + + Attributes + ----------- + id: :class:`int` + The guild's ID. + name: :class:`str` + The guild's name. + channels: Optional[List[:class:`WidgetChannel`]] + The accessible voice channels in the guild. + members: Optional[List[:class:`Member`]] + The online members in the server. Offline members + do not appear in the widget. 
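+
+    A minimal sketch of obtaining a widget (assuming ``client`` is a connected
+    :class:`Client` and the guild has its widget enabled)::
+
+        widget = await client.fetch_widget(guild_id)
+        online = [member.display_name for member in widget.members]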
+ """ + __slots__ = ('_state', 'channels', '_invite', 'id', 'members', 'name') + + def __init__(self, *, state, data): + self._state = state + self._invite = data['instant_invite'] + self.name = data['name'] + self.id = int(data['id']) + + self.channels = [] + for channel in data.get('channels', []): + _id = int(channel['id']) + self.channels.append(WidgetChannel(id=_id, name=channel['name'], position=channel['position'])) + + self.members = [] + channels = {channel.id: channel for channel in self.channels} + for member in data.get('members', []): + connected_channel = _get_as_snowflake(member, 'channel_id') + if connected_channel: + connected_channel = channels[connected_channel] + + self.members.append(WidgetMember(state=self._state, data=member, connected_channel=connected_channel)) + + def __str__(self): + return self.json_url + + def __eq__(self, other): + return self.id == other.id + + def __repr__(self): + return ''.format(self) + + @property + def created_at(self): + """:class:`datetime.datetime`: Returns the member's creation time in UTC.""" + return snowflake_time(self.id) + + @property + def json_url(self): + """:class:`str`: The JSON URL of the widget.""" + return "https://discordapp.com/api/guilds/{0.id}/widget.json".format(self) + + @property + def invite_url(self): + """Optiona[:class:`str`]: The invite URL for the guild, if available.""" + return self._invite + + async def fetch_invite(self, *, with_counts=True): + """|coro| + + Retrieves an :class:`Invite` from a invite URL or ID. + This is the same as :meth:`Client.fetch_invite`; the invite + code is abstracted away. + + Parameters + ----------- + with_counts: :class:`bool` + Whether to include count information in the invite. This fills the + :attr:`Invite.approximate_member_count` and :attr:`Invite.approximate_presence_count` + fields. + + Returns + -------- + :class:`Invite` + The invite from the URL/ID. 
+ """ + if self._invite: + invite_id = resolve_invite(self._invite) + data = await self._state.http.get_invite(invite_id, with_counts=with_counts) + return Invite.from_incomplete(state=self._state, data=data) diff --git a/venv/lib/python3.7/site-packages/easy_install.py b/venv/lib/python3.7/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/venv/lib/python3.7/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/venv/lib/python3.7/site-packages/git/__init__.py b/venv/lib/python3.7/site-packages/git/__init__.py new file mode 100644 index 0000000..1c587ac --- /dev/null +++ b/venv/lib/python3.7/site-packages/git/__init__.py @@ -0,0 +1,86 @@ +# __init__.py +# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors +# +# This module is part of GitPython and is released under +# the BSD License: http://www.opensource.org/licenses/bsd-license.php +# flake8: noqa +#@PydevCodeAnalysisIgnore +import inspect +import os +import sys + +import os.path as osp + + +__version__ = '3.0.5' + + +#{ Initialization +def _init_externals(): + """Initialize external projects by putting them into the path""" + if __version__ == '3.0.5': + sys.path.insert(0, osp.join(osp.dirname(__file__), 'ext', 'gitdb')) + + try: + import gitdb + except ImportError: + raise ImportError("'gitdb' could not be found in your PYTHONPATH") + # END verify import + +#} END initialization + +################# +_init_externals() +################# + +#{ Imports + +from git.exc import * # @NoMove @IgnorePep8 +try: + from git.config import GitConfigParser # @NoMove @IgnorePep8 + from git.objects import * # @NoMove @IgnorePep8 + from git.refs import * # @NoMove @IgnorePep8 + from git.diff import * # @NoMove @IgnorePep8 + from git.db import * # @NoMove @IgnorePep8 + from git.cmd import Git # @NoMove @IgnorePep8 + from git.repo import Repo # @NoMove @IgnorePep8 + from git.remote import * # @NoMove @IgnorePep8 + from git.index import * # @NoMove @IgnorePep8 + from git.util import ( # @NoMove @IgnorePep8 + LockFile, + BlockingLockFile, + Stats, + Actor, + rmtree, + ) +except GitError as exc: + raise ImportError('%s: %s' % (exc.__class__.__name__, exc)) + +#} END imports + +__all__ = [name for name, obj in locals().items() + if not (name.startswith('_') or inspect.ismodule(obj))] + + +#{ Initialize git executable path +GIT_OK = None + +def refresh(path=None): + """Convenience method for setting the git executable path.""" + global GIT_OK + GIT_OK = False + + if not Git.refresh(path=path): + return + if not FetchInfo.refresh(): + return + + GIT_OK = True +#} END initialize git executable path + +################# +try: + refresh() +except Exception as exc: + raise ImportError('Failed to initialize: {0}'.format(exc)) +################# diff --git a/venv/lib/python3.7/site-packages/git/cmd.py b/venv/lib/python3.7/site-packages/git/cmd.py new file mode 100644 index 0000000..08e25af --- /dev/null +++ b/venv/lib/python3.7/site-packages/git/cmd.py @@ -0,0 +1,1115 @@ +# cmd.py +# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors +# +# This module is part of GitPython and is released under +# the BSD License: http://www.opensource.org/licenses/bsd-license.php + +from contextlib import contextmanager +import io +import logging +import os +import signal +from subprocess import ( + call, + Popen, + PIPE +) +import subprocess +import sys +import 
threading +from collections import OrderedDict +from textwrap import dedent + +from git.compat import ( + string_types, + defenc, + force_bytes, + PY3, + # just to satisfy flake8 on py3 + unicode, + safe_decode, + is_posix, + is_win, +) +from git.exc import CommandError +from git.util import is_cygwin_git, cygpath, expand_path + +from .exc import ( + GitCommandError, + GitCommandNotFound +) +from .util import ( + LazyMixin, + stream_copy, +) + +try: + PermissionError +except NameError: # Python < 3.3 + PermissionError = OSError + +execute_kwargs = {'istream', 'with_extended_output', + 'with_exceptions', 'as_process', 'stdout_as_string', + 'output_stream', 'with_stdout', 'kill_after_timeout', + 'universal_newlines', 'shell', 'env', 'max_chunk_size'} + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + +__all__ = ('Git',) + + +# ============================================================================== +## @name Utilities +# ------------------------------------------------------------------------------ +# Documentation +## @{ + +def handle_process_output(process, stdout_handler, stderr_handler, + finalizer=None, decode_streams=True): + """Registers for notifications to learn that process output is ready to read, and dispatches lines to + the respective line handlers. + This function returns once the finalizer returns + + :return: result of finalizer + :param process: subprocess.Popen instance + :param stdout_handler: f(stdout_line_string), or None + :param stderr_handler: f(stderr_line_string), or None + :param finalizer: f(proc) - wait for proc to finish + :param decode_streams: + Assume stdout/stderr streams are binary and decode them before pushing \ + their contents to handlers. + Set it to False if `universal_newline == True` (then streams are in text-mode) + or if decoding must happen later (i.e. for Diffs). + """ + # Use 2 "pump" threads and wait for both to finish. + def pump_stream(cmdline, name, stream, is_decode, handler): + try: + for line in stream: + if handler: + if is_decode: + line = line.decode(defenc) + handler(line) + except Exception as ex: + log.error("Pumping %r of cmd(%s) failed due to: %r", name, cmdline, ex) + raise CommandError(['<%s-pump>' % name] + cmdline, ex) + finally: + stream.close() + + cmdline = getattr(process, 'args', '') # PY3+ only + if not isinstance(cmdline, (tuple, list)): + cmdline = cmdline.split() + + pumps = [] + if process.stdout: + pumps.append(('stdout', process.stdout, stdout_handler)) + if process.stderr: + pumps.append(('stderr', process.stderr, stderr_handler)) + + threads = [] + + for name, stream, handler in pumps: + t = threading.Thread(target=pump_stream, + args=(cmdline, name, stream, decode_streams, handler)) + t.setDaemon(True) + t.start() + threads.append(t) + + ## FIXME: Why Join?? Will block if `stdin` needs feeding... 
+ # + for t in threads: + t.join() + + if finalizer: + return finalizer(process) + + +def dashify(string): + return string.replace('_', '-') + + +def slots_to_dict(self, exclude=()): + return {s: getattr(self, s) for s in self.__slots__ if s not in exclude} + + +def dict_to_slots_and__excluded_are_none(self, d, excluded=()): + for k, v in d.items(): + setattr(self, k, v) + for k in excluded: + setattr(self, k, None) + +## -- End Utilities -- @} + + +# value of Windows process creation flag taken from MSDN +CREATE_NO_WINDOW = 0x08000000 + +## CREATE_NEW_PROCESS_GROUP is needed to allow killing it afterwards, +# see https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal +PROC_CREATIONFLAGS = (CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP + if is_win else 0) + + +class Git(LazyMixin): + + """ + The Git class manages communication with the Git binary. + + It provides a convenient interface to calling the Git binary, such as in:: + + g = Git( git_dir ) + g.init() # calls 'git init' program + rval = g.ls_files() # calls 'git ls-files' program + + ``Debugging`` + Set the GIT_PYTHON_TRACE environment variable print each invocation + of the command to stdout. + Set its value to 'full' to see details about the returned values. + """ + __slots__ = ("_working_dir", "cat_file_all", "cat_file_header", "_version_info", + "_git_options", "_persistent_git_options", "_environment") + + _excluded_ = ('cat_file_all', 'cat_file_header', '_version_info') + + def __getstate__(self): + return slots_to_dict(self, exclude=self._excluded_) + + def __setstate__(self, d): + dict_to_slots_and__excluded_are_none(self, d, excluded=self._excluded_) + + # CONFIGURATION + + git_exec_name = "git" # default that should work on linux and windows + + # Enables debugging of GitPython's git commands + GIT_PYTHON_TRACE = os.environ.get("GIT_PYTHON_TRACE", False) + + # If True, a shell will be used when executing git commands. + # This should only be desirable on Windows, see https://github.com/gitpython-developers/GitPython/pull/126 + # and check `git/test_repo.py:TestRepo.test_untracked_files()` TC for an example where it is required. + # Override this value using `Git.USE_SHELL = True` + USE_SHELL = False + + # Provide the full path to the git executable. Otherwise it assumes git is in the path + _git_exec_env_var = "GIT_PYTHON_GIT_EXECUTABLE" + _refresh_env_var = "GIT_PYTHON_REFRESH" + GIT_PYTHON_GIT_EXECUTABLE = None + # note that the git executable is actually found during the refresh step in + # the top level __init__ + + @classmethod + def refresh(cls, path=None): + """This gets called by the refresh function (see the top level + __init__). + """ + # discern which path to refresh with + if path is not None: + new_git = os.path.expanduser(path) + new_git = os.path.abspath(new_git) + else: + new_git = os.environ.get(cls._git_exec_env_var, cls.git_exec_name) + + # keep track of the old and new git executable path + old_git = cls.GIT_PYTHON_GIT_EXECUTABLE + cls.GIT_PYTHON_GIT_EXECUTABLE = new_git + + # test if the new git executable path is valid + + # - a GitCommandNotFound error is spawned by ourselves + # - a PermissionError is spawned if the git executable provided + # cannot be executed for whatever reason + + has_git = False + try: + cls().version() + has_git = True + except (GitCommandNotFound, PermissionError): + pass + + # warn or raise exception if test failed + if not has_git: + err = dedent("""\ + Bad git executable. 
+ The git executable must be specified in one of the following ways: + - be included in your $PATH + - be set via $%s + - explicitly set via git.refresh() + """) % cls._git_exec_env_var + + # revert to whatever the old_git was + cls.GIT_PYTHON_GIT_EXECUTABLE = old_git + + if old_git is None: + # on the first refresh (when GIT_PYTHON_GIT_EXECUTABLE is + # None) we only are quiet, warn, or error depending on the + # GIT_PYTHON_REFRESH value + + # determine what the user wants to happen during the initial + # refresh we expect GIT_PYTHON_REFRESH to either be unset or + # be one of the following values: + # 0|q|quiet|s|silence + # 1|w|warn|warning + # 2|r|raise|e|error + + mode = os.environ.get(cls._refresh_env_var, "raise").lower() + + quiet = ["quiet", "q", "silence", "s", "none", "n", "0"] + warn = ["warn", "w", "warning", "1"] + error = ["error", "e", "raise", "r", "2"] + + if mode in quiet: + pass + elif mode in warn or mode in error: + err = dedent("""\ + %s + All git commands will error until this is rectified. + + This initial warning can be silenced or aggravated in the future by setting the + $%s environment variable. Use one of the following values: + - %s: for no warning or exception + - %s: for a printed warning + - %s: for a raised exception + + Example: + export %s=%s + """) % ( + err, + cls._refresh_env_var, + "|".join(quiet), + "|".join(warn), + "|".join(error), + cls._refresh_env_var, + quiet[0]) + + if mode in warn: + print("WARNING: %s" % err) + else: + raise ImportError(err) + else: + err = dedent("""\ + %s environment variable has been set but it has been set with an invalid value. + + Use only the following values: + - %s: for no warning or exception + - %s: for a printed warning + - %s: for a raised exception + """) % ( + cls._refresh_env_var, + "|".join(quiet), + "|".join(warn), + "|".join(error)) + raise ImportError(err) + + # we get here if this was the init refresh and the refresh mode + # was not error, go ahead and set the GIT_PYTHON_GIT_EXECUTABLE + # such that we discern the difference between a first import + # and a second import + cls.GIT_PYTHON_GIT_EXECUTABLE = cls.git_exec_name + else: + # after the first refresh (when GIT_PYTHON_GIT_EXECUTABLE + # is no longer None) we raise an exception + raise GitCommandNotFound("git", err) + + return has_git + + @classmethod + def is_cygwin(cls): + return is_cygwin_git(cls.GIT_PYTHON_GIT_EXECUTABLE) + + @classmethod + def polish_url(cls, url, is_cygwin=None): + if is_cygwin is None: + is_cygwin = cls.is_cygwin() + + if is_cygwin: + url = cygpath(url) + else: + """Remove any backslahes from urls to be written in config files. + + Windows might create config-files containing paths with backslashed, + but git stops liking them as it will escape the backslashes. + Hence we undo the escaping just to be sure. + """ + url = os.path.expandvars(url) + if url.startswith('~'): + url = os.path.expanduser(url) + url = url.replace("\\\\", "\\").replace("\\", "/") + + return url + + class AutoInterrupt(object): + """Kill/Interrupt the stored process instance once this instance goes out of scope. It is + used to prevent processes piling up in case iterators stop reading. + Besides all attributes are wired through to the contained process object. 
+ + The wait method was overridden to perform automatic status code checking + and possibly raise.""" + + __slots__ = ("proc", "args") + + def __init__(self, proc, args): + self.proc = proc + self.args = args + + def __del__(self): + if self.proc is None: + return + + proc = self.proc + self.proc = None + if proc.stdin: + proc.stdin.close() + if proc.stdout: + proc.stdout.close() + if proc.stderr: + proc.stderr.close() + + # did the process finish already so we have a return code ? + try: + if proc.poll() is not None: + return + except OSError as ex: + log.info("Ignored error after process had died: %r", ex) + + # can be that nothing really exists anymore ... + if os is None or getattr(os, 'kill', None) is None: + return + + # try to kill it + try: + proc.terminate() + proc.wait() # ensure process goes away + except OSError as ex: + log.info("Ignored error after process had died: %r", ex) + except AttributeError: + # try windows + # for some reason, providing None for stdout/stderr still prints something. This is why + # we simply use the shell and redirect to nul. Its slower than CreateProcess, question + # is whether we really want to see all these messages. Its annoying no matter what. + if is_win: + call(("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(proc.pid)), shell=True) + # END exception handling + + def __getattr__(self, attr): + return getattr(self.proc, attr) + + def wait(self, stderr=b''): # TODO: Bad choice to mimic `proc.wait()` but with different args. + """Wait for the process and return its status code. + + :param stderr: Previously read value of stderr, in case stderr is already closed. + :warn: may deadlock if output or error pipes are used and not handled separately. + :raise GitCommandError: if the return status is not 0""" + if stderr is None: + stderr = b'' + stderr = force_bytes(data=stderr, encoding='utf-8') + + status = self.proc.wait() + + def read_all_from_possibly_closed_stream(stream): + try: + return stderr + force_bytes(stream.read()) + except ValueError: + return stderr or b'' + + if status != 0: + errstr = read_all_from_possibly_closed_stream(self.proc.stderr) + log.debug('AutoInterrupt wait stderr: %r' % (errstr,)) + raise GitCommandError(self.args, status, errstr) + # END status handling + return status + # END auto interrupt + + class CatFileContentStream(object): + + """Object representing a sized read-only stream returning the contents of + an object. + It behaves like a stream, but counts the data read and simulates an empty + stream once our sized content region is empty. + If not all data is read to the end of the objects's lifetime, we read the + rest to assure the underlying stream continues to work""" + + __slots__ = ('_stream', '_nbr', '_size') + + def __init__(self, size, stream): + self._stream = stream + self._size = size + self._nbr = 0 # num bytes read + + # special case: if the object is empty, has null bytes, get the + # final newline right away. 
+ if size == 0: + stream.read(1) + # END handle empty streams + + def read(self, size=-1): + bytes_left = self._size - self._nbr + if bytes_left == 0: + return b'' + if size > -1: + # assure we don't try to read past our limit + size = min(bytes_left, size) + else: + # they try to read all, make sure its not more than what remains + size = bytes_left + # END check early depletion + data = self._stream.read(size) + self._nbr += len(data) + + # check for depletion, read our final byte to make the stream usable by others + if self._size - self._nbr == 0: + self._stream.read(1) # final newline + # END finish reading + return data + + def readline(self, size=-1): + if self._nbr == self._size: + return b'' + + # clamp size to lowest allowed value + bytes_left = self._size - self._nbr + if size > -1: + size = min(bytes_left, size) + else: + size = bytes_left + # END handle size + + data = self._stream.readline(size) + self._nbr += len(data) + + # handle final byte + if self._size - self._nbr == 0: + self._stream.read(1) + # END finish reading + + return data + + def readlines(self, size=-1): + if self._nbr == self._size: + return [] + + # leave all additional logic to our readline method, we just check the size + out = [] + nbr = 0 + while True: + line = self.readline() + if not line: + break + out.append(line) + if size > -1: + nbr += len(line) + if nbr > size: + break + # END handle size constraint + # END readline loop + return out + + # skipcq: PYL-E0301 + def __iter__(self): + return self + + def next(self): + line = self.readline() + if not line: + raise StopIteration + + return line + + def __del__(self): + bytes_left = self._size - self._nbr + if bytes_left: + # read and discard - seeking is impossible within a stream + # includes terminating newline + self._stream.read(bytes_left + 1) + # END handle incomplete read + + def __init__(self, working_dir=None): + """Initialize this instance with: + + :param working_dir: + Git directory we should work in. If None, we always work in the current + directory as returned by os.getcwd(). + It is meant to be the working tree directory if available, or the + .git directory in case of bare repositories.""" + super(Git, self).__init__() + self._working_dir = expand_path(working_dir) + self._git_options = () + self._persistent_git_options = [] + + # Extra environment variables to pass to git commands + self._environment = {} + + # cached command slots + self.cat_file_header = None + self.cat_file_all = None + + def __getattr__(self, name): + """A convenience method as it allows to call the command as if it was + an object. + :return: Callable object that will execute call _call_process with your arguments.""" + if name[0] == '_': + return LazyMixin.__getattr__(self, name) + return lambda *args, **kwargs: self._call_process(name, *args, **kwargs) + + def set_persistent_git_options(self, **kwargs): + """Specify command line options to the git executable + for subsequent subcommand calls + + :param kwargs: + is a dict of keyword arguments. + these arguments are passed as in _call_process + but will be passed to the git command rather than + the subcommand. 
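+
+        ``Example`` (an illustrative sketch; the chosen option is arbitrary)::
+
+            git = Git('.')
+            git.set_persistent_git_options(no_pager=True)
+            git.log('-1')   # effectively runs: git --no-pager log -1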
+ """ + + self._persistent_git_options = self.transform_kwargs( + split_single_char_options=True, **kwargs) + + def _set_cache_(self, attr): + if attr == '_version_info': + # We only use the first 4 numbers, as everything else could be strings in fact (on windows) + version_numbers = self._call_process('version').split(' ')[2] + self._version_info = tuple(int(n) for n in version_numbers.split('.')[:4] if n.isdigit()) + else: + super(Git, self)._set_cache_(attr) + # END handle version info + + @property + def working_dir(self): + """:return: Git directory we are working on""" + return self._working_dir + + @property + def version_info(self): + """ + :return: tuple(int, int, int, int) tuple with integers representing the major, minor + and additional version numbers as parsed from git version. + This value is generated on demand and is cached""" + return self._version_info + + def execute(self, command, + istream=None, + with_extended_output=False, + with_exceptions=True, + as_process=False, + output_stream=None, + stdout_as_string=True, + kill_after_timeout=None, + with_stdout=True, + universal_newlines=False, + shell=None, + env=None, + max_chunk_size=io.DEFAULT_BUFFER_SIZE, + **subprocess_kwargs + ): + """Handles executing the command on the shell and consumes and returns + the returned information (stdout) + + :param command: + The command argument list to execute. + It should be a string, or a sequence of program arguments. The + program to execute is the first item in the args sequence or string. + + :param istream: + Standard input filehandle passed to subprocess.Popen. + + :param with_extended_output: + Whether to return a (status, stdout, stderr) tuple. + + :param with_exceptions: + Whether to raise an exception when git returns a non-zero status. + + :param as_process: + Whether to return the created process instance directly from which + streams can be read on demand. This will render with_extended_output and + with_exceptions ineffective - the caller will have + to deal with the details himself. + It is important to note that the process will be placed into an AutoInterrupt + wrapper that will interrupt the process once it goes out of scope. If you + use the command in iterators, you should pass the whole process instance + instead of a single stream. + + :param output_stream: + If set to a file-like object, data produced by the git command will be + output to the given stream directly. + This feature only has any effect if as_process is False. Processes will + always be created with a pipe due to issues with subprocess. + This merely is a workaround as data will be copied from the + output pipe to the given output stream directly. + Judging from the implementation, you shouldn't use this flag ! + + :param stdout_as_string: + if False, the commands standard output will be bytes. Otherwise, it will be + decoded into a string using the default encoding (usually utf-8). + The latter can fail, if the output contains binary data. + + :param env: + A dictionary of environment variables to be passed to `subprocess.Popen`. + + :param max_chunk_size: + Maximum number of bytes in one chunk of data passed to the output_stream in + one invocation of write() method. If the given number is not positive then + the default value is used. + + :param subprocess_kwargs: + Keyword arguments to be passed to subprocess.Popen. Please note that + some of the valid kwargs are already set by this method, the ones you + specify may not be the same ones. 
+ + :param with_stdout: If True, default True, we open stdout on the created process + :param universal_newlines: + if True, pipes will be opened as text, and lines are split at + all known line endings. + :param shell: + Whether to invoke commands through a shell (see `Popen(..., shell=True)`). + It overrides :attr:`USE_SHELL` if it is not `None`. + :param kill_after_timeout: + To specify a timeout in seconds for the git command, after which the process + should be killed. This will have no effect if as_process is set to True. It is + set to None by default and will let the process run until the timeout is + explicitly specified. This feature is not supported on Windows. It's also worth + noting that kill_after_timeout uses SIGKILL, which can have negative side + effects on a repository. For example, stale locks in case of git gc could + render the repository incapable of accepting changes until the lock is manually + removed. + + :return: + * str(output) if extended_output = False (Default) + * tuple(int(status), str(stdout), str(stderr)) if extended_output = True + + if output_stream is True, the stdout value will be your output stream: + * output_stream if extended_output = False + * tuple(int(status), output_stream, str(stderr)) if extended_output = True + + Note git is executed with LC_MESSAGES="C" to ensure consistent + output regardless of system language. + + :raise GitCommandError: + + :note: + If you add additional keyword arguments to the signature of this method, + you must update the execute_kwargs tuple housed in this module.""" + if self.GIT_PYTHON_TRACE and (self.GIT_PYTHON_TRACE != 'full' or as_process): + log.info(' '.join(command)) + + # Allow the user to have the command executed in their working dir. + cwd = self._working_dir or os.getcwd() + + # Start the process + inline_env = env + env = os.environ.copy() + # Attempt to force all output to plain ascii english, which is what some parsing code + # may expect. + # According to stackoverflow (http://goo.gl/l74GC8), we are setting LANGUAGE as well + # just to be sure. + env["LANGUAGE"] = "C" + env["LC_ALL"] = "C" + env.update(self._environment) + if inline_env is not None: + env.update(inline_env) + + if is_win: + cmd_not_found_exception = OSError + if kill_after_timeout: + raise GitCommandError(command, '"kill_after_timeout" feature is not supported on Windows.') + else: + if sys.version_info[0] > 2: + cmd_not_found_exception = FileNotFoundError # NOQA # exists, flake8 unknown @UndefinedVariable + else: + cmd_not_found_exception = OSError + # end handle + + stdout_sink = (PIPE + if with_stdout + else getattr(subprocess, 'DEVNULL', None) or open(os.devnull, 'wb')) + istream_ok = "None" + if istream: + istream_ok = "" + log.debug("Popen(%s, cwd=%s, universal_newlines=%s, shell=%s, istream=%s)", + command, cwd, universal_newlines, shell, istream_ok) + try: + proc = Popen(command, + env=env, + cwd=cwd, + bufsize=-1, + stdin=istream, + stderr=PIPE, + stdout=stdout_sink, + shell=shell is not None and shell or self.USE_SHELL, + close_fds=is_posix, # unsupported on windows + universal_newlines=universal_newlines, + creationflags=PROC_CREATIONFLAGS, + **subprocess_kwargs + ) + except cmd_not_found_exception as err: + raise GitCommandNotFound(command, err) + + if as_process: + return self.AutoInterrupt(proc, command) + + def _kill_process(pid): + """ Callback method to kill a process. 
""" + p = Popen(['ps', '--ppid', str(pid)], stdout=PIPE, + creationflags=PROC_CREATIONFLAGS) + child_pids = [] + for line in p.stdout: + if len(line.split()) > 0: + local_pid = (line.split())[0] + if local_pid.isdigit(): + child_pids.append(int(local_pid)) + try: + # Windows does not have SIGKILL, so use SIGTERM instead + sig = getattr(signal, 'SIGKILL', signal.SIGTERM) + os.kill(pid, sig) + for child_pid in child_pids: + try: + os.kill(child_pid, sig) + except OSError: + pass + kill_check.set() # tell the main routine that the process was killed + except OSError: + # It is possible that the process gets completed in the duration after timeout + # happens and before we try to kill the process. + pass + return + # end + + if kill_after_timeout: + kill_check = threading.Event() + watchdog = threading.Timer(kill_after_timeout, _kill_process, args=(proc.pid,)) + + # Wait for the process to return + status = 0 + stdout_value = b'' + stderr_value = b'' + try: + if output_stream is None: + if kill_after_timeout: + watchdog.start() + stdout_value, stderr_value = proc.communicate() + if kill_after_timeout: + watchdog.cancel() + if kill_check.isSet(): + stderr_value = ('Timeout: the command "%s" did not complete in %d ' + 'secs.' % (" ".join(command), kill_after_timeout)).encode(defenc) + # strip trailing "\n" + if stdout_value.endswith(b"\n"): + stdout_value = stdout_value[:-1] + if stderr_value.endswith(b"\n"): + stderr_value = stderr_value[:-1] + status = proc.returncode + else: + max_chunk_size = max_chunk_size if max_chunk_size and max_chunk_size > 0 else io.DEFAULT_BUFFER_SIZE + stream_copy(proc.stdout, output_stream, max_chunk_size) + stdout_value = proc.stdout.read() + stderr_value = proc.stderr.read() + # strip trailing "\n" + if stderr_value.endswith(b"\n"): + stderr_value = stderr_value[:-1] + status = proc.wait() + # END stdout handling + finally: + proc.stdout.close() + proc.stderr.close() + + if self.GIT_PYTHON_TRACE == 'full': + cmdstr = " ".join(command) + + def as_text(stdout_value): + return not output_stream and safe_decode(stdout_value) or '' + # end + + if stderr_value: + log.info("%s -> %d; stdout: '%s'; stderr: '%s'", + cmdstr, status, as_text(stdout_value), safe_decode(stderr_value)) + elif stdout_value: + log.info("%s -> %d; stdout: '%s'", cmdstr, status, as_text(stdout_value)) + else: + log.info("%s -> %d", cmdstr, status) + # END handle debug printing + + if with_exceptions and status != 0: + raise GitCommandError(command, status, stderr_value, stdout_value) + + if isinstance(stdout_value, bytes) and stdout_as_string: # could also be output_stream + stdout_value = safe_decode(stdout_value) + + # Allow access to the command's status code + if with_extended_output: + return (status, stdout_value, safe_decode(stderr_value)) + else: + return stdout_value + + def environment(self): + return self._environment + + def update_environment(self, **kwargs): + """ + Set environment variables for future git invocations. 
Return all changed + values in a format that can be passed back into this function to revert + the changes: + + ``Examples``:: + + old_env = self.update_environment(PWD='/tmp') + self.update_environment(**old_env) + + :param kwargs: environment variables to use for git processes + :return: dict that maps environment variables to their old values + """ + old_env = {} + for key, value in kwargs.items(): + # set value if it is None + if value is not None: + old_env[key] = self._environment.get(key) + self._environment[key] = value + # remove key from environment if its value is None + elif key in self._environment: + old_env[key] = self._environment[key] + del self._environment[key] + return old_env + + @contextmanager + def custom_environment(self, **kwargs): + """ + A context manager around the above ``update_environment`` method to restore the + environment back to its previous state after operation. + + ``Examples``:: + + with self.custom_environment(GIT_SSH='/bin/ssh_wrapper'): + repo.remotes.origin.fetch() + + :param kwargs: see update_environment + """ + old_env = self.update_environment(**kwargs) + try: + yield + finally: + self.update_environment(**old_env) + + def transform_kwarg(self, name, value, split_single_char_options): + if len(name) == 1: + if value is True: + return ["-%s" % name] + elif value not in (False, None): + if split_single_char_options: + return ["-%s" % name, "%s" % value] + else: + return ["-%s%s" % (name, value)] + else: + if value is True: + return ["--%s" % dashify(name)] + elif value is not False and value is not None: + return ["--%s=%s" % (dashify(name), value)] + return [] + + def transform_kwargs(self, split_single_char_options=True, **kwargs): + """Transforms Python style kwargs into git command line options.""" + args = [] + kwargs = OrderedDict(sorted(kwargs.items(), key=lambda x: x[0])) + for k, v in kwargs.items(): + if isinstance(v, (list, tuple)): + for value in v: + args += self.transform_kwarg(k, value, split_single_char_options) + else: + args += self.transform_kwarg(k, v, split_single_char_options) + return args + + @classmethod + def __unpack_args(cls, arg_list): + if not isinstance(arg_list, (list, tuple)): + # This is just required for unicode conversion, as subprocess can't handle it + # However, in any other case, passing strings (usually utf-8 encoded) is totally fine + if not PY3 and isinstance(arg_list, unicode): + return [arg_list.encode(defenc)] + return [str(arg_list)] + + outlist = [] + for arg in arg_list: + if isinstance(arg_list, (list, tuple)): + outlist.extend(cls.__unpack_args(arg)) + elif not PY3 and isinstance(arg_list, unicode): + outlist.append(arg_list.encode(defenc)) + # END recursion + else: + outlist.append(str(arg)) + # END for each arg + return outlist + + def __call__(self, **kwargs): + """Specify command line options to the git executable + for a subcommand call + + :param kwargs: + is a dict of keyword arguments. + these arguments are passed as in _call_process + but will be passed to the git command rather than + the subcommand. + + ``Examples``:: + git(work_tree='/tmp').difftool()""" + self._git_options = self.transform_kwargs( + split_single_char_options=True, **kwargs) + return self + + def _call_process(self, method, *args, **kwargs): + """Run the given git command with the specified arguments and return + the result as a String + + :param method: + is the command. Contained "_" characters will be converted to dashes, + such as in 'ls_files' to call 'ls-files'. + + :param args: + is the list of arguments. 
+    def _call_process(self, method, *args, **kwargs):
+        """Run the given git command with the specified arguments and return
+        the result as a String
+
+        :param method:
+            is the command. Contained "_" characters will be converted to dashes,
+            such as in 'ls_files' to call 'ls-files'.
+
+        :param args:
+            is the list of arguments. If None is included, it will be pruned.
+            This allows your commands to call git more conveniently as None
+            is realized as non-existent
+
+        :param kwargs:
+            It contains key-values for the following:
+            - the :meth:`execute()` kwds, as listed in :var:`execute_kwargs`;
+            - "command options" to be converted by :meth:`transform_kwargs()`;
+            - the `'insert_kwargs_after'` key, whose value must match one of ``*args``;
+              any cmd-options will be appended after the matched arg.
+
+        Examples::
+
+            git.rev_list('master', max_count=10, header=True)
+
+        turns into::
+
+            git rev-list --max-count=10 --header master
+
+        :return: Same as ``execute``"""
+        # Handle optional arguments prior to calling transform_kwargs
+        # otherwise these'll end up in args, which is bad.
+        exec_kwargs = {k: v for k, v in kwargs.items() if k in execute_kwargs}
+        opts_kwargs = {k: v for k, v in kwargs.items() if k not in execute_kwargs}
+
+        insert_after_this_arg = opts_kwargs.pop('insert_kwargs_after', None)
+
+        # Prepare the argument list
+        opt_args = self.transform_kwargs(**opts_kwargs)
+        ext_args = self.__unpack_args([a for a in args if a is not None])
+
+        if insert_after_this_arg is None:
+            args = opt_args + ext_args
+        else:
+            try:
+                index = ext_args.index(insert_after_this_arg)
+            except ValueError:
+                raise ValueError("Couldn't find argument '%s' in args %s to insert cmd options after"
+                                 % (insert_after_this_arg, str(ext_args)))
+            # end handle error
+            args = ext_args[:index + 1] + opt_args + ext_args[index + 1:]
+        # end handle opts_kwargs
+
+        call = [self.GIT_PYTHON_GIT_EXECUTABLE]
+
+        # add persistent git options
+        call.extend(self._persistent_git_options)
+
+        # add the git options, then reset to empty
+        # to avoid side_effects
+        call.extend(self._git_options)
+        self._git_options = ()
+
+        call.append(dashify(method))
+        call.extend(args)
+
+        return self.execute(call, **exec_kwargs)
+
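To make the docstring above concrete, a hedged usage sketch (not part of the diff): dynamic attribute access on `repo.git` funnels into `_call_process`. It assumes GitPython is installed and a repository exists in the current directory.

```python
from git import Repo

repo = Repo('.')
git = repo.git

# ls_files -> `git ls-files`; keyword arguments become command options.
tracked = git.ls_files()

# Roughly: git rev-list --header --max-count=10 HEAD
revs = git.rev_list('HEAD', max_count=10, header=True)

# insert_kwargs_after places the generated options after a positional argument,
# e.g. git log --oneline HEAD --max-count=5
last = git.log('--oneline', 'HEAD', insert_kwargs_after='HEAD', max_count=5)
```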
+    def _parse_object_header(self, header_line):
+        """
+        :param header_line:
+            <hex_sha> type_string size_as_int
+
+        :return: (hex_sha, type_string, size_as_int)
+
+        :raise ValueError: if the header contains indication for an error due to
+            incorrect input sha"""
+        tokens = header_line.split()
+        if len(tokens) != 3:
+            if not tokens:
+                raise ValueError("SHA could not be resolved, git returned: %r" % (header_line.strip()))
+            else:
+                raise ValueError("SHA %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip()))
+            # END handle actual return value
+        # END error handling
+
+        if len(tokens[0]) != 40:
+            raise ValueError("Failed to parse header: %r" % header_line)
+        return (tokens[0], tokens[1], int(tokens[2]))
+
+    def _prepare_ref(self, ref):
+        # required for command to separate refs on stdin, as bytes
+        refstr = ref
+        if isinstance(ref, bytes):
+            # Assume 40 bytes hexsha - bin-to-ascii for some reason returns bytes, not text
+            refstr = ref.decode('ascii')
+        elif not isinstance(ref, string_types):
+            refstr = str(ref)  # could be ref-object
+
+        if not refstr.endswith("\n"):
+            refstr += "\n"
+        return refstr.encode(defenc)
+
+    def _get_persistent_cmd(self, attr_name, cmd_name, *args, **kwargs):
+        cur_val = getattr(self, attr_name)
+        if cur_val is not None:
+            return cur_val
+
+        options = {"istream": PIPE, "as_process": True}
+        options.update(kwargs)
+
+        cmd = self._call_process(cmd_name, *args, **options)
+        setattr(self, attr_name, cmd)
+        return cmd
+
+    def __get_object_header(self, cmd, ref):
+        cmd.stdin.write(self._prepare_ref(ref))
+        cmd.stdin.flush()
+        return self._parse_object_header(cmd.stdout.readline())
+
+    def get_object_header(self, ref):
+        """ Use this method to quickly examine the type and size of the object behind
+        the given ref.
+
+        :note: The method will only suffer from the costs of command invocation
+            once and reuses the command in subsequent calls.
+
+        :return: (hexsha, type_string, size_as_int)"""
+        cmd = self._get_persistent_cmd("cat_file_header", "cat_file", batch_check=True)
+        return self.__get_object_header(cmd, ref)
+
+    def get_object_data(self, ref):
+        """ As get_object_header, but returns object data as well
+        :return: (hexsha, type_string, size_as_int, data_string)
+        :note: not threadsafe"""
+        hexsha, typename, size, stream = self.stream_object_data(ref)
+        data = stream.read(size)
+        del(stream)
+        return (hexsha, typename, size, data)
+
+    def stream_object_data(self, ref):
+        """ As get_object_header, but returns the data as a stream
+
+        :return: (hexsha, type_string, size_as_int, stream)
+        :note: This method is not threadsafe, you need one independent Command instance per thread to be safe !"""
+        cmd = self._get_persistent_cmd("cat_file_all", "cat_file", batch=True)
+        hexsha, typename, size = self.__get_object_header(cmd, ref)
+        return (hexsha, typename, size, self.CatFileContentStream(size, cmd.stdout))
+
+    def clear_cache(self):
+        """Clear all kinds of internal caches to release resources.
+
+        Currently persistent commands will be interrupted.
+
+        :return: self"""
+        for cmd in (self.cat_file_all, self.cat_file_header):
+            if cmd:
+                cmd.__del__()
+
+        self.cat_file_all = None
+        self.cat_file_header = None
+        return self
diff --git a/venv/lib/python3.7/site-packages/git/compat.py b/venv/lib/python3.7/site-packages/git/compat.py
new file mode 100644
index 0000000..e88ca9b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/git/compat.py
@@ -0,0 +1,313 @@
+# -*- coding: utf-8 -*-
+# config.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""utilities to help provide compatibility with python 3"""
+# flake8: noqa
+
+import locale
+import os
+import sys
+import codecs
+
+
+from gitdb.utils.compat import (
+    xrange,
+    MAXSIZE,    # @UnusedImport
+    izip,       # @UnusedImport
+)
+from gitdb.utils.encoding import (
+    string_types,   # @UnusedImport
+    text_type,      # @UnusedImport
+    force_bytes,    # @UnusedImport
+    force_text      # @UnusedImport
+)
+
+
+PY3 = sys.version_info[0] >= 3
+is_win = (os.name == 'nt')
+is_posix = (os.name == 'posix')
+is_darwin = (os.name == 'darwin')
+if hasattr(sys, 'getfilesystemencoding'):
+    defenc = sys.getfilesystemencoding()
+if defenc is None:
+    defenc = sys.getdefaultencoding()
+
+if PY3:
+    import io
+    FileType = io.IOBase
+
+    def byte_ord(b):
+        return b
+
+    def bchr(n):
+        return bytes([n])
+
+    def mviter(d):
+        return d.values()
+
+    range = xrange    # @ReservedAssignment
+    unicode = str
+    binary_type = bytes
+else:
+    FileType = file    # @UndefinedVariable on PY3
+    # usually, this is just ascii, which might not enough for our encoding needs
+    # Unless it's set specifically, we override it to be utf-8
+    if defenc == 'ascii':
+        defenc = 'utf-8'
+    byte_ord = ord
+    bchr = chr
+    unicode = unicode
+    binary_type = str
+    range = xrange    # @ReservedAssignment
+
+    def mviter(d):
+        return d.itervalues()
+
+
+def safe_decode(s):
+    """Safely decodes a binary string to unicode"""
+    if isinstance(s, unicode):
+        return s
+    elif isinstance(s, bytes):
+        return s.decode(defenc, 'surrogateescape')
+    elif s is not None:
+        raise TypeError('Expected bytes or
text, but got %r' % (s,)) + + +def safe_encode(s): + """Safely decodes a binary string to unicode""" + if isinstance(s, unicode): + return s.encode(defenc) + elif isinstance(s, bytes): + return s + elif s is not None: + raise TypeError('Expected bytes or text, but got %r' % (s,)) + + +def win_encode(s): + """Encode unicodes for process arguments on Windows.""" + if isinstance(s, unicode): + return s.encode(locale.getpreferredencoding(False)) + elif isinstance(s, bytes): + return s + elif s is not None: + raise TypeError('Expected bytes or text, but got %r' % (s,)) + + +def with_metaclass(meta, *bases): + """copied from https://github.com/Byron/bcore/blob/master/src/python/butility/future.py#L15""" + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + + def __new__(cls, name, nbases, d): + if nbases is None: + return type.__new__(cls, name, (), d) + # There may be clients who rely on this attribute to be set to a reasonable value, which is why + # we set the __metaclass__ attribute explicitly + if not PY3 and '___metaclass__' not in d: + d['__metaclass__'] = meta + return meta(name, bases, d) + return metaclass(meta.__name__ + 'Helper', None, {}) + + +## From https://docs.python.org/3.3/howto/pyporting.html +class UnicodeMixin(object): + + """Mixin class to handle defining the proper __str__/__unicode__ + methods in Python 2 or 3.""" + + if PY3: + def __str__(self): + return self.__unicode__() + else: # Python 2 + def __str__(self): + return self.__unicode__().encode(defenc) + + +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + + +FS_ERRORS = 'surrogateescape' + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + +def u(text): + if PY3: + return text + return text.decode('unicode_escape') + +def b(data): + if PY3: + return data.encode('latin1') + return data + +if PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start:exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... + decoded = replace_surrogate_encode(mystring, exc) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring, exc): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. 
+ """ + decoded = [] + for ch in mystring: + # if PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. + raise exc + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == 'ascii': + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError(FS_ENCODING, + fn, index, index+1, + 'ordinal not in range(128)') + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == 'utf-8': + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, + fn, index, index+1, 'surrogates not allowed') + else: + ch_utf8 = ch.encode('utf-8') + encoded.append(ch_utf8) + return bytes().join(encoded) + return fn.encode(FS_ENCODING, FS_ERRORS) + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + +FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". 
+FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +try: + b"100644 \x9f\0aaa".decode(defenc, "surrogateescape") +except Exception: + register_surrogateescape() diff --git a/venv/lib/python3.7/site-packages/git/config.py b/venv/lib/python3.7/site-packages/git/config.py new file mode 100644 index 0000000..762069c --- /dev/null +++ b/venv/lib/python3.7/site-packages/git/config.py @@ -0,0 +1,744 @@ +# config.py +# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors +# +# This module is part of GitPython and is released under +# the BSD License: http://www.opensource.org/licenses/bsd-license.php +"""Module containing module parser implementation able to properly read and write +configuration files""" + +import abc +from functools import wraps +import inspect +import logging +import os +import re +from collections import OrderedDict + +from git.compat import ( + string_types, + FileType, + defenc, + force_text, + with_metaclass, + PY3, + is_win, +) +from git.util import LockFile + +import os.path as osp + + +try: + import ConfigParser as cp +except ImportError: + # PY3 + import configparser as cp + + +__all__ = ('GitConfigParser', 'SectionConstraint') + + +log = logging.getLogger('git.config') +log.addHandler(logging.NullHandler()) + +# invariants +# represents the configuration level of a configuration file +CONFIG_LEVELS = ("system", "user", "global", "repository") + + +class MetaParserBuilder(abc.ABCMeta): + + """Utlity class wrapping base-class methods into decorators that assure read-only properties""" + def __new__(cls, name, bases, clsdict): + """ + Equip all base-class methods with a needs_values decorator, and all non-const methods + with a set_dirty_and_flush_changes decorator in addition to that.""" + kmm = '_mutating_methods_' + if kmm in clsdict: + mutating_methods = clsdict[kmm] + for base in bases: + methods = (t for t in inspect.getmembers(base, inspect.isroutine) if not t[0].startswith("_")) + for name, method in methods: + if name in clsdict: + continue + method_with_values = needs_values(method) + if name in mutating_methods: + method_with_values = set_dirty_and_flush_changes(method_with_values) + # END mutating methods handling + + clsdict[name] = method_with_values + # END for each name/method pair + # END for each base + # END if mutating methods configuration is set + + new_type = super(MetaParserBuilder, cls).__new__(cls, name, bases, clsdict) + return new_type + + +def needs_values(func): + """Returns method assuring we read values (on demand) before we try to access them""" + + @wraps(func) + def assure_data_present(self, *args, **kwargs): + self.read() + return func(self, *args, **kwargs) + # END wrapper method + return assure_data_present + + +def set_dirty_and_flush_changes(non_const_func): + """Return method that checks whether given non constant function may be called. + If so, the instance will be set dirty. 
+    Additionally, we flush the changes right to disk"""
+
+    def flush_changes(self, *args, **kwargs):
+        rval = non_const_func(self, *args, **kwargs)
+        self._dirty = True
+        self.write()
+        return rval
+    # END wrapper method
+    flush_changes.__name__ = non_const_func.__name__
+    return flush_changes
+
+
+class SectionConstraint(object):
+
+    """Constrains a ConfigParser to only option commands which are constrained to
+    always use the section we have been initialized with.
+
+    It supports all ConfigParser methods that operate on an option.
+
+    :note:
+        If used as a context manager, will release the wrapped ConfigParser."""
+    __slots__ = ("_config", "_section_name")
+    _valid_attrs_ = ("get_value", "set_value", "get", "set", "getint", "getfloat", "getboolean", "has_option",
+                     "remove_section", "remove_option", "options")
+
+    def __init__(self, config, section):
+        self._config = config
+        self._section_name = section
+
+    def __del__(self):
+        # Yes, for some reason, we have to call it explicitly for it to work in PY3 !
+        # Apparently __del__ doesn't get called anymore if refcount becomes 0
+        # Ridiculous ... .
+        self._config.release()
+
+    def __getattr__(self, attr):
+        if attr in self._valid_attrs_:
+            return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
+        return super(SectionConstraint, self).__getattribute__(attr)
+
+    def _call_config(self, method, *args, **kwargs):
+        """Call the configuration at the given method which must take a section name
+        as first argument"""
+        return getattr(self._config, method)(self._section_name, *args, **kwargs)
+
+    @property
+    def config(self):
+        """return: ConfigParser instance we constrain"""
+        return self._config
+
+    def release(self):
+        """Equivalent to GitConfigParser.release(), which is called on our underlying parser instance"""
+        return self._config.release()
+
+    def __enter__(self):
+        self._config.__enter__()
+        return self
+
+    def __exit__(self, exception_type, exception_value, traceback):
+        self._config.__exit__(exception_type, exception_value, traceback)
+
+
+class _OMD(OrderedDict):
+    """Ordered multi-dict."""
+
+    def __setitem__(self, key, value):
+        super(_OMD, self).__setitem__(key, [value])
+
+    def add(self, key, value):
+        if key not in self:
+            super(_OMD, self).__setitem__(key, [value])
+            return
+
+        super(_OMD, self).__getitem__(key).append(value)
+
+    def setall(self, key, values):
+        super(_OMD, self).__setitem__(key, values)
+
+    def __getitem__(self, key):
+        return super(_OMD, self).__getitem__(key)[-1]
+
+    def getlast(self, key):
+        return super(_OMD, self).__getitem__(key)[-1]
+
+    def setlast(self, key, value):
+        if key not in self:
+            super(_OMD, self).__setitem__(key, [value])
+            return
+
+        prior = super(_OMD, self).__getitem__(key)
+        prior[-1] = value
+
+    def get(self, key, default=None):
+        return super(_OMD, self).get(key, [default])[-1]
+
+    def getall(self, key):
+        return super(_OMD, self).__getitem__(key)
+
+    def items(self):
+        """List of (key, last value for key)."""
+        return [(k, self[k]) for k in self]
+
+    def items_all(self):
+        """List of (key, list of values for key)."""
+        return [(k, self.getall(k)) for k in self]
+
+
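A short sketch (not part of the diff) of the multi-value semantics `_OMD` provides, which git configuration needs because the same key may legitimately appear several times. `_OMD` is a private helper of `git.config`, imported here purely for illustration.

```python
from git.config import _OMD  # private helper, used here only to illustrate

omd = _OMD()
omd['remote.origin.fetch'] = '+refs/heads/*:refs/remotes/origin/*'
omd.add('remote.origin.fetch', '+refs/pull/*:refs/remotes/origin/pr/*')

omd['remote.origin.fetch']          # plain lookups return the last value
omd.getall('remote.origin.fetch')   # all values, in insertion order
omd.items_all()                     # [(key, [value, ...]), ...]
```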
+def get_config_path(config_level):
+
+    # we do not support an absolute path of the gitconfig on Windows,
+    # use the global config instead
+    if is_win and config_level == "system":
+        config_level = "global"
+
+    if config_level == "system":
+        return "/etc/gitconfig"
+    elif config_level == "user":
+        config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(os.environ.get("HOME", '~'), ".config")
+        return osp.normpath(osp.expanduser(osp.join(config_home, "git", "config")))
+    elif config_level == "global":
+        return osp.normpath(osp.expanduser("~/.gitconfig"))
+    elif config_level == "repository":
+        raise ValueError("No repo to get repository configuration from. Use Repo._get_config_path")
+
+    raise ValueError("Invalid configuration level: %r" % config_level)
+
+
+class GitConfigParser(with_metaclass(MetaParserBuilder, cp.RawConfigParser, object)):
+
+    """Implements specifics required to read git style configuration files.
+
+    This variation behaves much like the git.config command such that the configuration
+    will be read on demand based on the filepath given during initialization.
+
+    The changes will automatically be written once the instance goes out of scope, but
+    can be triggered manually as well.
+
+    The configuration file will be locked if you intend to change values, preventing other
+    instances from writing concurrently.
+
+    :note:
+        The config is case-sensitive even when queried, hence section and option names
+        must match perfectly.
+        If used as a context manager, will release the locked file."""
+
+    #{ Configuration
+    # The lock type determines the type of lock to use in new configuration readers.
+    # They must be compatible with the LockFile interface.
+    # A suitable alternative would be the BlockingLockFile
+    t_lock = LockFile
+    re_comment = re.compile(r'^\s*[#;]')
+
+    #} END configuration
+
+    optvalueonly_source = r'\s*(?P