diff --git a/bot.py b/bot.py
index bc5e67a..0f9486d 100755
--- a/bot.py
+++ b/bot.py
@@ -1,13 +1,5 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-__author__ = "Maël / Outout | Romain"
-__licence__ = "WTFPL Licence 2.0"
-
-
-import copy
 import datetime
-import os
+import logging
 import sys
 import traceback
@@ -15,122 +7,95 @@
 import aiohttp
 import discord
 from discord.ext import commands
 
-import cogs.utils.cli_colors as colors
 import config
-from cogs.utils import checks
+from cogs.utils.lang import _
 
-if sys.version_info[1] < 7 or sys.version_info[0] < 3:
-    print(f"{colors.text_colors.RED}[ERROR] Python 3.7 or + is required.{colors.ENDC}")
-    exit()
+description = """
+Je suis TuxBot, le bot qui vit de l'OpenSource ! ;)
+"""
+
+log = logging.getLogger(__name__)
 
 l_extensions = (
     'cogs.admin',
-    'cogs.afk',
-    'cogs.atc',
-    'cogs.basics',
-    'cogs.ci',
-    'cogs.filter_messages',
-    'cogs.funs',
-    'cogs.role',
-    'cogs.search',
-    'cogs.send_logs',
-    'cogs.sondage',
-    'cogs.utility',
-    'cogs.vocal',
-    'cogs.private',
+    'cogs.basics',
 )
 
-help_attrs = dict(hidden=True, in_help=True, name="DONOTUSE")
+
+async def _prefix_callable(bot, message):
+    base = [] if config.prefix is None else config.prefix
+
+    # if message.guild is not None:
+    #     base.extend(bot.prefixes.get(message.guild.id))
+    return commands.when_mentioned_or(*base)(bot, message)
 
 
-class TuxBot(commands.Bot):
-    def __init__(self):
+class TuxBot(commands.AutoShardedBot):
+    __slots__ = ('uptime', 'config', 'session')
+
+    def __init__(self, unload):
+        super().__init__(command_prefix=_prefix_callable,
+                         description=description, pm_help=None,
+                         help_command=None, help_attrs=dict(hidden=True))
+
+        self.uptime = datetime.datetime.utcnow()
         self.config = config
-        super().__init__(command_prefix=self.config.prefix[0],
-                         description=self.config.description,
-                         pm_help=None,
-                         help_command=None)
-
-        self.client_id = self.config.client_id
+        self.prefixes = {}
         self.session = aiohttp.ClientSession(loop=self.loop)
-        self._events = []
-
-        self.add_command(self.do)
 
         for extension in l_extensions:
-            try:
-                self.load_extension(extension)
-                print(f"{colors.text_colors.GREEN}\"{extension}\""
-                      f" chargé !{colors.ENDC}")
-            except Exception as e:
-                print(f"{colors.text_colors.RED}"
-                      f"Impossible de charger l'extension {extension}\n"
-                      f"{type(e).__name__}: {e}{colors.ENDC}", file=sys.stderr)
+            if extension not in unload:
+                try:
+                    self.load_extension(extension)
+                except Exception as e:
+                    print(_("Failed to load extension : ") + extension,
+                          file=sys.stderr)
+                    traceback.print_exc()
 
     async def on_command_error(self, ctx, error):
         if isinstance(error, commands.NoPrivateMessage):
-            await ctx.author.send('Cette commande ne peut pas être utilisee '
-                                  'en message privee.')
+            await ctx.author.send(
+                _('This command cannot be used in private messages.')
+            )
         elif isinstance(error, commands.DisabledCommand):
-            await ctx.author.send('Desoler mais cette commande est desactive, '
-                                  'elle ne peut donc pas être utilisée.')
+            await ctx.author.send(
+                _('Sorry. '
+                  'This command is disabled and cannot be used.')
+            )
         elif isinstance(error, commands.CommandInvokeError):
-            print(f'In {ctx.command.qualified_name}:', file=sys.stderr)
+            print(_('In ') + f'{ctx.command.qualified_name}:', file=sys.stderr)
             traceback.print_tb(error.original.__traceback__)
             print(f'{error.original.__class__.__name__}: {error.original}',
                   file=sys.stderr)
+        elif isinstance(error, commands.ArgumentParsingError):
+            await ctx.send(error)
 
     async def on_ready(self):
-        log_channel_id = await self.fetch_channel(self.config.log_channel_id)
+        if not hasattr(self, 'uptime'):
+            self.uptime = datetime.datetime.utcnow()
 
-        print('\n\n---------------------')
-        print('CONNECTÉ :')
-        print(f'Nom d\'utilisateur: {self.user} {colors.text_style.DIM}'
-              f'(ID: {self.user.id}){colors.ENDC}')
-        print(f'Channel de log: {log_channel_id} {colors.text_style.DIM}'
-              f'(ID: {log_channel_id.id}){colors.ENDC}')
-        print(f'Prefix: {self.config.prefix[0]}')
-        print('Merci d\'utiliser TuxBot')
-        print('---------------------\n\n')
+        print(_('Ready:') + f' {self.user} (ID: {self.user.id})')
 
         await self.change_presence(status=discord.Status.dnd,
                                    activity=discord.Game(
-                                       name=self.config.game)
-                                   )
+                                       name=self.config.activity
+                                   ))
 
     @staticmethod
     async def on_resumed():
         print('resumed...')
 
-    async def on_message(self, message):
-        if message.author.bot:
-            return
+    @property
+    def logs_webhook(self):
+        logs_webhook = self.config.logs_webhook
+        webhook = discord.Webhook.partial(id=logs_webhook.get('id'),
+                                          token=logs_webhook.get('token'),
+                                          adapter=discord.AsyncWebhookAdapter(
+                                              self.session))
+        return webhook
 
-        try:
-            await self.process_commands(message)
-        except Exception as e:
-            print(f'{colors.text_colors.RED}Erreur rencontré : \n'
-                  f' {type(e).__name__}: {e}{colors.ENDC} \n \n')
+    async def close(self):
+        await super().close()
+        await self.session.close()
 
     def run(self):
-        super().run(self.config.token, reconnect=True)
-
-    @checks.has_permissions(administrator=True)
-    @commands.command(pass_context=True, hidden=True)
-    async def do(self, ctx, times: int, *, command):
-        """Repeats a command a specified number of times."""
-        msg = copy.copy(ctx.message)
-        msg.content = command
-        for i in range(times):
-            await self.process_commands(msg)
-
-
-if __name__ == '__main__':
-    if os.path.exists('config.py') is not True:
-        print(f"{colors.text_colors.RED}"
-              f"Veuillez créer le fichier config.py{colors.ENDC}")
-        exit()
-
-    tuxbot = TuxBot()
-    tuxbot.run()
+        super().run(config.token, reconnect=True)
diff --git a/cogs/utils/checks.py b/cogs/utils/checks.py
old mode 100755
new mode 100644
index b965267..4453006
--- a/cogs/utils/checks.py
+++ b/cogs/utils/checks.py
@@ -121,4 +121,4 @@ def check_date(date: str):
     if len(date) == 1:
         return f"0{date}"
     else:
-        return date
+        return date
\ No newline at end of file
diff --git a/cogs/utils/db.py b/cogs/utils/db.py
index 502d11a..90c7604 100755
--- a/cogs/utils/db.py
+++ b/cogs/utils/db.py
@@ -1,29 +1,1085 @@
-import pymysql
+# -*- coding: utf-8 -*-
+
+"""
+The MIT License (MIT)
+
+Copyright (c) 2017 Rapptz
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice
shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +# These are just things that allow me to make tables for PostgreSQL easier +# This isn't exactly good. It's just good enough for my uses. +# Also shoddy migration support. + +import asyncio +import datetime +import decimal +import inspect +import json +import logging +import pydoc +import uuid +from collections import OrderedDict +from pathlib import Path + +import asyncpg + +log = logging.getLogger(__name__) -def connect_to_db(self): - mysqlHost = self.bot.config.mysql["host"] - mysqlUser = self.bot.config.mysql["username"] - mysqlPass = self.bot.config.mysql["password"] - mysqlDB = self.bot.config.mysql["dbname"] - - try: - return pymysql.connect(host=mysqlHost, user=mysqlUser, - passwd=mysqlPass, db=mysqlDB, charset='utf8') - except KeyError: - print( - "Rest in peperoni, Impossible de se connecter a la base de données.") - print(str(KeyError)) - return +class SchemaError(Exception): + pass -def reconnect_to_db(self): - if not self.conn: - mysqlHost = self.bot.config.mysql["host"] - mysqlUser = self.bot.config.mysql["username"] - mysqlPass = self.bot.config.mysql["password"] - mysqlDB = self.bot.config.mysql["dbname"] +class SQLType: + python = None - return pymysql.connect(host=mysqlHost, user=mysqlUser, - passwd=mysqlPass, db=mysqlDB, charset='utf8') - return self.conn + def to_dict(self): + o = self.__dict__.copy() + cls = self.__class__ + o['__meta__'] = cls.__module__ + '.' + cls.__qualname__ + return o + + @classmethod + def from_dict(cls, data): + meta = data.pop('__meta__') + given = cls.__module__ + '.' + cls.__qualname__ + if given != meta: + cls = pydoc.locate(meta) + if cls is None: + raise RuntimeError('Could not locate "%s".' 
% meta) + + self = cls.__new__(cls) + self.__dict__.update(data) + return self + + def __eq__(self, other): + return isinstance(other, + self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not self.__eq__(other) + + def to_sql(self): + raise NotImplementedError() + + def is_real_type(self): + return True + + +class Binary(SQLType): + python = bytes + + def to_sql(self): + return 'BYTEA' + + +class Boolean(SQLType): + python = bool + + def to_sql(self): + return 'BOOLEAN' + + +class Date(SQLType): + python = datetime.date + + def to_sql(self): + return 'DATE' + + +class Datetime(SQLType): + python = datetime.datetime + + def __init__(self, *, timezone=False): + self.timezone = timezone + + def to_sql(self): + if self.timezone: + return 'TIMESTAMP WITH TIME ZONE' + return 'TIMESTAMP' + + +class Double(SQLType): + python = float + + def to_sql(self): + return 'REAL' + + +class Float(SQLType): + python = float + + def to_sql(self): + return 'FLOAT' + + +class Integer(SQLType): + python = int + + def __init__(self, *, big=False, small=False, auto_increment=False): + self.big = big + self.small = small + self.auto_increment = auto_increment + + if big and small: + raise SchemaError( + 'Integer column type cannot be both big and small.') + + def to_sql(self): + if self.auto_increment: + if self.big: + return 'BIGSERIAL' + if self.small: + return 'SMALLSERIAL' + return 'SERIAL' + if self.big: + return 'BIGINT' + if self.small: + return 'SMALLINT' + return 'INTEGER' + + def is_real_type(self): + return not self.auto_increment + + +class Interval(SQLType): + python = datetime.timedelta + + def __init__(self, field=None): + if field: + field = field.upper() + if field not in ( + 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', 'SECOND', + 'YEAR TO MONTH', 'DAY TO HOUR', 'DAY TO MINUTE', 'DAY TO SECOND', + 'HOUR TO MINUTE', 'HOUR TO SECOND', 'MINUTE TO SECOND'): + raise SchemaError('invalid interval specified') + self.field = field + else: + self.field = None + + def to_sql(self): + if self.field: + return 'INTERVAL ' + self.field + return 'INTERVAL' + + +class Numeric(SQLType): + python = decimal.Decimal + + def __init__(self, *, precision=None, scale=None): + if precision is not None: + if precision < 0 or precision > 1000: + raise SchemaError( + 'precision must be greater than 0 and below 1000') + if scale is None: + scale = 0 + + self.precision = precision + self.scale = scale + + def to_sql(self): + if self.precision is not None: + return 'NUMERIC({0.precision}, {0.scale})'.format(self) + return 'NUMERIC' + + +class String(SQLType): + python = str + + def __init__(self, *, length=None, fixed=False): + self.length = length + self.fixed = fixed + + if fixed and length is None: + raise SchemaError('Cannot have fixed string with no length') + + def to_sql(self): + if self.length is None: + return 'TEXT' + if self.fixed: + return 'CHAR({0.length})'.format(self) + return 'VARCHAR({0.length})'.format(self) + + +class Time(SQLType): + python = datetime.time + + def __init__(self, *, timezone=False): + self.timezone = timezone + + def to_sql(self): + if self.timezone: + return 'TIME WITH TIME ZONE' + return 'TIME' + + +class JSON(SQLType): + python = None + + def to_sql(self): + return 'JSONB' + + +class ForeignKey(SQLType): + def __init__(self, table, column, *, sql_type=None, on_delete='CASCADE', + on_update='NO ACTION'): + if not table or not isinstance(table, str): + raise SchemaError('missing table to reference (must be string)') + + valid_actions = ( + 'NO ACTION', + 
'RESTRICT', + 'CASCADE', + 'SET NULL', + 'SET DEFAULT', + ) + + on_delete = on_delete.upper() + on_update = on_update.upper() + + if on_delete not in valid_actions: + raise TypeError('on_delete must be one of %s.' % valid_actions) + + if on_update not in valid_actions: + raise TypeError('on_update must be one of %s.' % valid_actions) + + self.table = table + self.column = column + self.on_update = on_update + self.on_delete = on_delete + + if sql_type is None: + sql_type = Integer + + if inspect.isclass(sql_type): + sql_type = sql_type() + + if not isinstance(sql_type, SQLType): + raise TypeError('Cannot have non-SQLType derived sql_type') + + if not sql_type.is_real_type(): + raise SchemaError('sql_type must be a "real" type') + + self.sql_type = sql_type.to_sql() + + def is_real_type(self): + return False + + def to_sql(self): + fmt = '{0.sql_type} REFERENCES {0.table} ({0.column})' \ + ' ON DELETE {0.on_delete} ON UPDATE {0.on_update}' + return fmt.format(self) + + +class Array(SQLType): + python = list + + def __init__(self, sql_type): + if inspect.isclass(sql_type): + sql_type = sql_type() + + if not isinstance(sql_type, SQLType): + raise TypeError('Cannot have non-SQLType derived sql_type') + + if not sql_type.is_real_type(): + raise SchemaError('sql_type must be a "real" type') + + self.sql_type = sql_type.to_sql() + + def to_sql(self): + return '{0.sql_type} ARRAY'.format(self) + + def is_real_type(self): + # technically, it is a real type + # however, it doesn't play very well with migrations + # so we're going to pretend that it isn't + return False + + +class Column: + __slots__ = ('column_type', 'index', 'primary_key', 'nullable', + 'default', 'unique', 'name', 'index_name') + + def __init__(self, column_type, *, index=False, primary_key=False, + nullable=True, unique=False, default=None, name=None): + + if inspect.isclass(column_type): + column_type = column_type() + + if not isinstance(column_type, SQLType): + raise TypeError('Cannot have a non-SQLType derived column_type') + + self.column_type = column_type + self.index = index + self.unique = unique + self.primary_key = primary_key + self.nullable = nullable + self.default = default + self.name = name + self.index_name = None # to be filled later + + if sum(map(bool, (unique, primary_key, default is not None))) > 1: + raise SchemaError( + "'unique', 'primary_key', and 'default' are mutually exclusive.") + + @classmethod + def from_dict(cls, data): + index_name = data.pop('index_name', None) + column_type = data.pop('column_type') + column_type = SQLType.from_dict(column_type) + self = cls(column_type=column_type, **data) + self.index_name = index_name + return self + + @property + def _comparable_id(self): + return '-'.join( + '%s:%s' % (attr, getattr(self, attr)) for attr in self.__slots__) + + def _to_dict(self): + d = { + attr: getattr(self, attr) + for attr in self.__slots__ + } + d['column_type'] = self.column_type.to_dict() + return d + + def _qualifiers_dict(self): + return {attr: getattr(self, attr) for attr in ('nullable', 'default')} + + def _is_rename(self, other): + if self.name == other.name: + return False + + return self.unique == other.unique and self.primary_key == other.primary_key + + def _create_table(self): + builder = [] + builder.append(self.name) + builder.append(self.column_type.to_sql()) + + default = self.default + if default is not None: + builder.append('DEFAULT') + if isinstance(default, str) and isinstance(self.column_type, + String): + builder.append("'%s'" % default) + elif 
isinstance(default, bool): + builder.append(str(default).upper()) + else: + builder.append("(%s)" % default) + elif self.unique: + builder.append('UNIQUE') + if not self.nullable: + builder.append('NOT NULL') + + return ' '.join(builder) + + +class PrimaryKeyColumn(Column): + """Shortcut for a SERIAL PRIMARY KEY column.""" + + def __init__(self): + super().__init__(Integer(auto_increment=True), primary_key=True) + + +class SchemaDiff: + __slots__ = ('table', 'upgrade', 'downgrade') + + def __init__(self, table, upgrade, downgrade): + self.table = table + self.upgrade = upgrade + self.downgrade = downgrade + + def to_dict(self): + return {'upgrade': self.upgrade, 'downgrade': self.downgrade} + + def is_empty(self): + return len(self.upgrade) == 0 and len(self.downgrade) == 0 + + def to_sql(self, *, downgrade=False): + statements = [] + base = 'ALTER TABLE %s ' % self.table.__tablename__ + path = self.upgrade if not downgrade else self.downgrade + + for rename in path.get('rename_columns', []): + fmt = '{0}RENAME COLUMN {1[before]} TO {1[after]};'.format(base, + rename) + statements.append(fmt) + + sub_statements = [] + for dropped in path.get('remove_columns', []): + fmt = 'DROP COLUMN {0[name]} RESTRICT'.format(dropped) + sub_statements.append(fmt) + + for changed_types in path.get('changed_column_types', []): + fmt = 'ALTER COLUMN {0[name]} SET DATA TYPE {0[type]}'.format( + changed_types) + + using = changed_types.get('using') + if using is not None: + fmt = '%s USING %s' % (fmt, using) + + sub_statements.append(fmt) + + for constraints in path.get('changed_constraints', []): + before, after = constraints['before'], constraints['after'] + + before_default, after_default = before.get('default'), after.get( + 'default') + if before_default is None and after_default is not None: + fmt = 'ALTER COLUMN {0[name]} SET DEFAULT {1[default]}'.format( + constraints, after) + sub_statements.append(fmt) + elif before_default is not None and after_default is None: + fmt = 'ALTER COLUMN {0[name]} DROP DEFAULT'.format(constraints) + sub_statements.append(fmt) + + before_nullable, after_nullable = before.get( + 'nullable'), after.get('nullable') + if not before_nullable and after_nullable: + fmt = 'ALTER COLUMN {0[name]} DROP NOT NULL'.format( + constraints) + sub_statements.append(fmt) + elif before_nullable and not after_nullable: + fmt = 'ALTER COLUMN {0[name]} SET NOT NULL'.format(constraints) + sub_statements.append(fmt) + + for added in path.get('add_columns', []): + column = Column.from_dict(added) + sub_statements.append('ADD COLUMN ' + column._create_table()) + + if sub_statements: + statements.append(base + ', '.join(sub_statements) + ';') + + # handle the index creation bits + for dropped in path.get('drop_index', []): + statements.append( + 'DROP INDEX IF EXISTS {0[index]};'.format(dropped)) + + for added in path.get('add_index', []): + fmt = 'CREATE INDEX IF NOT EXISTS {0[index]} ON {1.__tablename__} ({0[name]});' + statements.append(fmt.format(added, self.table)) + + return '\n'.join(statements) + + +class MaybeAcquire: + def __init__(self, connection, *, pool): + self.connection = connection + self.pool = pool + self._cleanup = False + + async def __aenter__(self): + if self.connection is None: + self._cleanup = True + self._connection = c = await self.pool.acquire() + return c + return self.connection + + async def __aexit__(self, *args): + if self._cleanup: + await self.pool.release(self._connection) + + +class TableMeta(type): + @classmethod + def __prepare__(cls, name, bases, 
**kwargs): + return OrderedDict() + + def __new__(cls, name, parents, dct, **kwargs): + columns = [] + + try: + table_name = kwargs['table_name'] + except KeyError: + table_name = name.lower() + + dct['__tablename__'] = table_name + + for elem, value in dct.items(): + if isinstance(value, Column): + if value.name is None: + value.name = elem + + if value.index: + value.index_name = '%s_%s_idx' % (table_name, value.name) + + columns.append(value) + + dct['columns'] = columns + return super().__new__(cls, name, parents, dct) + + def __init__(self, name, parents, dct, **kwargs): + super().__init__(name, parents, dct) + + +class Table(metaclass=TableMeta): + @classmethod + async def create_pool(cls, uri, **kwargs): + """Sets up and returns the PostgreSQL connection pool that is used. + + .. note:: + + This must be called at least once before doing anything with the tables. + And must be called on the ``Table`` class. + + Parameters + ----------- + uri: str + The PostgreSQL URI to connect to. + \*\*kwargs + The arguments to forward to asyncpg.create_pool. + """ + + def _encode_jsonb(value): + return json.dumps(value) + + def _decode_jsonb(value): + return json.loads(value) + + old_init = kwargs.pop('init', None) + + async def init(con): + await con.set_type_codec('jsonb', schema='pg_catalog', + encoder=_encode_jsonb, + decoder=_decode_jsonb, format='text') + if old_init is not None: + await old_init(con) + + cls._pool = pool = await asyncpg.create_pool(uri, init=init, **kwargs) + return pool + + @classmethod + def acquire_connection(cls, connection): + return MaybeAcquire(connection, pool=cls._pool) + + @classmethod + def write_migration(cls, *, directory='migrations'): + """Writes the migration diff into the data file. + + Note + ------ + This doesn't actually commit/do the migration. + To do so, use :meth:`migrate`. + + Returns + -------- + bool + ``True`` if a migration was written, ``False`` otherwise. + + Raises + ------- + RuntimeError + Could not find the migration data necessary. + """ + + directory = Path(directory) / cls.__tablename__ + p = directory.with_suffix('.json') + + if not p.exists(): + raise RuntimeError('Could not find migration file.') + + current = directory.with_name('current-' + p.name) + + if not current.exists(): + raise RuntimeError('Could not find current data file.') + + with current.open() as fp: + current_table = cls.from_dict(json.load(fp)) + + diff = cls().diff(current_table) + + # the most common case, no difference + if diff.is_empty(): + return None + + # load the migration data + with p.open('r', encoding='utf-8') as fp: + data = json.load(fp) + migrations = data['migrations'] + + # check if we should add it + our_migrations = diff.to_dict() + if len(migrations) == 0 or migrations[-1] != our_migrations: + # we have a new migration, so add it + migrations.append(our_migrations) + temp_file = p.with_name('%s-%s.tmp' % (uuid.uuid4(), p.name)) + with temp_file.open('w', encoding='utf-8') as tmp: + json.dump(data, tmp, ensure_ascii=True, indent=4) + + temp_file.replace(p) + return True + return False + + @classmethod + async def migrate(cls, *, directory='migrations', index=-1, + downgrade=False, verbose=False, connection=None): + """Actually run the latest migration pointed by the data file. + + Parameters + ----------- + directory: str + The directory of where the migration data file resides. + index: int + The index of the migration array to use. + downgrade: bool + Whether to run an upgrade or a downgrade. 
+ verbose: bool + Whether to output some information to stdout. + connection: Optional[asyncpg.Connection] + The connection to use, if not provided will acquire one from + the internal pool. + """ + + directory = Path(directory) / cls.__tablename__ + p = directory.with_suffix('.json') + if not p.exists(): + raise RuntimeError('Could not find migration file.') + + with p.open('r', encoding='utf-8') as fp: + data = json.load(fp) + migrations = data['migrations'] + + try: + migration = migrations[index] + except IndexError: + return False + + diff = SchemaDiff(cls, migration['upgrade'], migration['downgrade']) + if diff.is_empty(): + return False + + async with MaybeAcquire(connection, pool=cls._pool) as con: + sql = diff.to_sql(downgrade=downgrade) + if verbose: + print(sql) + await con.execute(sql) + + current = directory.with_name('current-' + p.name) + with current.open('w', encoding='utf-8') as fp: + json.dump(cls.to_dict(), fp, indent=4, ensure_ascii=True) + + @classmethod + async def create(cls, *, directory='migrations', verbose=False, + connection=None, run_migrations=True): + """Creates the database and manages migrations, if any. + + Parameters + ----------- + directory: str + The migrations directory. + verbose: bool + Whether to output some information to stdout. + connection: Optional[asyncpg.Connection] + The connection to use, if not provided will acquire one from + the internal pool. + run_migrations: bool + Whether to run migrations at all. + + Returns + -------- + Optional[bool] + ``True`` if the table was successfully created or + ``False`` if the table was successfully migrated or + ``None`` if no migration took place. + """ + directory = Path(directory) / cls.__tablename__ + p = directory.with_suffix('.json') + current = directory.with_name('current-' + p.name) + + table_data = cls.to_dict() + + if not p.exists(): + p.parent.mkdir(parents=True, exist_ok=True) + + # we're creating this table for the first time, + # it's an uncommon case so let's get it out of the way + # first, try to actually create the table + async with MaybeAcquire(connection, pool=cls._pool) as con: + sql = cls.create_table(exists_ok=True) + if verbose: + print(sql) + await con.execute(sql) + + # since that step passed, let's go ahead and make the migration + with p.open('w', encoding='utf-8') as fp: + data = {'table': table_data, 'migrations': []} + json.dump(data, fp, indent=4, ensure_ascii=True) + + with current.open('w', encoding='utf-8') as fp: + json.dump(table_data, fp, indent=4, ensure_ascii=True) + + return True + + if not run_migrations: + return None + + with current.open() as fp: + current_table = cls.from_dict(json.load(fp)) + + diff = cls().diff(current_table) + + # the most common case, no difference + if diff.is_empty(): + return None + + # execute the upgrade SQL + async with MaybeAcquire(connection, pool=cls._pool) as con: + sql = diff.to_sql() + if verbose: + print(sql) + await con.execute(sql) + + # load the migration data + with p.open('r', encoding='utf-8') as fp: + data = json.load(fp) + migrations = data['migrations'] + + # check if we should add it + our_migrations = diff.to_dict() + if len(migrations) == 0 or migrations[-1] != our_migrations: + # we have a new migration, so add it + migrations.append(our_migrations) + temp_file = p.with_name('%s-%s.tmp' % (uuid.uuid4(), p.name)) + with temp_file.open('w', encoding='utf-8') as tmp: + json.dump(data, tmp, ensure_ascii=True, indent=4) + + temp_file.replace(p) + + # update our "current" data in the filesystem + with 
current.open('w', encoding='utf-8') as fp: + json.dump(table_data, fp, indent=4, ensure_ascii=True) + + return False + + @classmethod + async def drop(cls, *, directory='migrations', verbose=False, + connection=None): + """Drops the database and migrations, if any. + + Parameters + ----------- + directory: str + The migrations directory. + verbose: bool + Whether to output some information to stdout. + connection: Optional[asyncpg.Connection] + The connection to use, if not provided will acquire one from + the internal pool. + """ + + directory = Path(directory) / cls.__tablename__ + p = directory.with_suffix('.json') + current = directory.with_name('current-' + p.name) + + if not p.exists() or not current.exists(): + raise RuntimeError('Could not find the appropriate data files.') + + try: + p.unlink() + except: + raise RuntimeError('Could not delete migration file') + + try: + current.unlink() + except: + raise RuntimeError('Could not delete current migration file') + + async with MaybeAcquire(connection, pool=cls._pool) as con: + sql = 'DROP TABLE {0} CASCADE;'.format(cls.__tablename__) + if verbose: + print(sql) + await con.execute(sql) + + @classmethod + def create_table(cls, *, exists_ok=True): + """Generates the CREATE TABLE stub.""" + statements = [] + builder = ['CREATE TABLE'] + + if exists_ok: + builder.append('IF NOT EXISTS') + + builder.append(cls.__tablename__) + column_creations = [] + primary_keys = [] + for col in cls.columns: + column_creations.append(col._create_table()) + if col.primary_key: + primary_keys.append(col.name) + + column_creations.append('PRIMARY KEY (%s)' % ', '.join(primary_keys)) + builder.append('(%s)' % ', '.join(column_creations)) + statements.append(' '.join(builder) + ';') + + # handle the index creations + for column in cls.columns: + if column.index: + fmt = 'CREATE INDEX IF NOT EXISTS {1.index_name} ON {0} ({1.name});'.format( + cls.__tablename__, column) + statements.append(fmt) + + return '\n'.join(statements) + + @classmethod + async def insert(cls, connection=None, **kwargs): + """Inserts an element to the table.""" + + # verify column names: + verified = {} + for column in cls.columns: + try: + value = kwargs[column.name] + except KeyError: + continue + + check = column.column_type.python + if value is None and not column.nullable: + raise TypeError( + 'Cannot pass None to non-nullable column %s.' % column.name) + elif not check or not isinstance(value, check): + fmt = 'column {0.name} expected {1.__name__}, received {2.__class__.__name__}' + raise TypeError(fmt.format(column, check, value)) + + verified[column.name] = value + + sql = 'INSERT INTO {0} ({1}) VALUES ({2});'.format(cls.__tablename__, + ', '.join(verified), + ', '.join( + '$' + str(i) for + i, _ in + enumerate( + verified, + 1))) + + async with MaybeAcquire(connection, pool=cls._pool) as con: + await con.execute(sql, *verified.values()) + + @classmethod + def to_dict(cls): + x = {} + x['name'] = cls.__tablename__ + x['__meta__'] = cls.__module__ + '.' + cls.__qualname__ + + # nb: columns is ordered due to the ordered dict usage + # this is used to help detect renames + x['columns'] = [a._to_dict() for a in cls.columns] + return x + + @classmethod + def from_dict(cls, data): + meta = data['__meta__'] + given = cls.__module__ + '.' + cls.__qualname__ + if given != meta: + cls = pydoc.locate(meta) + if cls is None: + raise RuntimeError('Could not locate "%s".' 
% meta) + + self = cls() + self.__tablename__ = data['name'] + self.columns = [Column.from_dict(a) for a in data['columns']] + return self + + @classmethod + def all_tables(cls): + return cls.__subclasses__() + + def diff(self, before): + """Outputs the upgrade and downgrade path in JSON. + + This isn't necessarily good, but it outputs it in a format + that allows the user to manually make edits if something is wrong. + + The following JSON schema is used: + + Note that every major key takes a list of objects as noted below. + + Note that add_column and drop_column automatically create and drop + indices as necessary. + + changed_column_types: + name: str [The column name] + type: str [The new column type] + using: Optional[str] [The USING expression to use, if applicable] + add_columns: + column: object + remove_columns: + column: object + rename_columns: + before: str [The previous column name] + after: str [The new column name] + drop_index: + name: str [The column name] + index: str [The index name] + add_index: + name: str [The column name] + index: str [The index name] + changed_constraints: + name: str [The column name] + before: + nullable: Optional[bool] + default: Optional[str] + after: + nullable: Optional[bool] + default: Optional[str] + """ + upgrade = {} + downgrade = {} + + def check_index_diff(a, b): + if a.index != b.index: + # Let's assume we have {name: thing, index: True} + # and we're going to { name: foo, index: False } + # This is a 'dropped' column when we upgrade with a rename + # care must be taken to use the old name when dropping + + # check if we're dropping the index + if not a.index: + # we could also be renaming so make sure to use the old index name + upgrade.setdefault('drop_index', []).append( + {'name': a.name, 'index': b.index_name}) + # if we want to roll back, we need to re-add the old index to the old column name + downgrade.setdefault('add_index', []).append( + {'name': b.name, 'index': b.index_name}) + else: + # we're not dropping an index, instead we're adding one + upgrade.setdefault('add_index', []).append( + {'name': a.name, 'index': a.index_name}) + downgrade.setdefault('drop_index', []).append( + {'name': a.name, 'index': a.index_name}) + + def insert_column_diff(a, b): + if a.column_type != b.column_type: + if a.name == b.name and a.column_type.is_real_type() and b.column_type.is_real_type(): + upgrade.setdefault('changed_column_types', []).append( + {'name': a.name, 'type': a.column_type.to_sql()}) + downgrade.setdefault('changed_column_types', []).append( + {'name': a.name, 'type': b.column_type.to_sql()}) + else: + a_dict, b_dict = a._to_dict(), b._to_dict() + upgrade.setdefault('add_columns', []).append(a_dict) + upgrade.setdefault('remove_columns', []).append(b_dict) + downgrade.setdefault('remove_columns', []).append(a_dict) + downgrade.setdefault('add_columns', []).append(b_dict) + check_index_diff(a, b) + return + + elif a._is_rename(b): + upgrade.setdefault('rename_columns', []).append( + {'before': b.name, 'after': a.name}) + downgrade.setdefault('rename_columns', []).append( + {'before': a.name, 'after': b.name}) + + # technically, adding UNIQUE or PRIMARY KEY is rather simple and straight forward + # however, since the inverse is a little bit more complicated (you have to remove + # the index it maintains and you can't easily know what it is), it's not exactly + # worth supporting any sort of change to the uniqueness/primary_key as it stands. + # So.. just drop/add the column and call it a day. 
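
Concretely, a uniqueness change therefore surfaces in the migration JSON as a paired add/remove of the same column rather than a changed_constraints entry. A minimal sketch of the payload diff() records for a hypothetical email column that gains unique=True (entries abbreviated here; the real ones are full Column._to_dict() dumps):

# Sketch only: abbreviated stand-ins for full Column._to_dict() entries.
upgrade = {
    'add_columns': [{'name': 'email', 'unique': True}],      # new definition
    'remove_columns': [{'name': 'email', 'unique': False}],  # old definition
}
downgrade = {
    'add_columns': [{'name': 'email', 'unique': False}],     # restores the old column
    'remove_columns': [{'name': 'email', 'unique': True}],
}
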
+ if a.unique != b.unique or a.primary_key != b.primary_key: + a_dict, b_dict = a._to_dict(), b._to_dict() + upgrade.setdefault('add_columns', []).append(a_dict) + upgrade.setdefault('remove_columns', []).append(b_dict) + downgrade.setdefault('remove_columns', []).append(a_dict) + downgrade.setdefault('add_columns', []).append(b_dict) + check_index_diff(a, b) + return + + check_index_diff(a, b) + + b_qual, a_qual = b._qualifiers_dict(), a._qualifiers_dict() + if a_qual != b_qual: + upgrade.setdefault('changed_constraints', []).append( + {'name': a.name, 'before': b_qual, 'after': a_qual}) + downgrade.setdefault('changed_constraints', []).append( + {'name': a.name, 'before': a_qual, 'after': b_qual}) + + if len(self.columns) == len(before.columns): + # check if we have any changes at all + for a, b in zip(self.columns, before.columns): + if a._comparable_id == b._comparable_id: + # no change + continue + insert_column_diff(a, b) + + elif len(self.columns) > len(before.columns): + # check if we have more columns + # typically when we add columns we add them at the end of + # the table, this assumption makes this particularly bit easier. + # Breaking this assumption will probably break this portion and thus + # will require manual handling, sorry. + + for a, b in zip(self.columns, before.columns): + if a._comparable_id == b._comparable_id: + # no change + continue + insert_column_diff(a, b) + + new_columns = self.columns[len(before.columns):] + add, remove = upgrade.setdefault('add_columns', + []), downgrade.setdefault( + 'remove_columns', []) + for column in new_columns: + as_dict = column._to_dict() + add.append(as_dict) + remove.append(as_dict) + if column.index: + upgrade.setdefault('add_index', []).append( + {'name': column.name, 'index': column.index_name}) + downgrade.setdefault('drop_index', []).append( + {'name': column.name, 'index': column.index_name}) + + elif len(self.columns) < len(before.columns): + # check if we have fewer columns + # this one is a little bit more complicated + + # first we sort the columns by comparable IDs. 
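
The removal branch below leans on a sort-then-zip alignment: both column lists are sorted by the same comparable-ID key, pairs that compare equal are unchanged, mismatched pairs go through insert_column_diff, and whatever the shorter (new) list leaves unconsumed at the tail of the old list is treated as removed. The same mechanics in miniature, with invented plain strings standing in for Column._comparable_id:

# Invented stand-ins for Column._comparable_id values.
before = ['age:int', 'email:text', 'name:text']
after = ['age:int', 'email:text']  # 'name:text' was dropped

sorted_before = sorted(before)
sorted_after = sorted(after)

for a, b in zip(sorted_after, sorted_before):
    if a == b:
        continue  # unchanged column
    print('changed:', b, '->', a)  # the real code calls insert_column_diff(a, b)

# anything left over in the longer (old) list was removed
for leftover in sorted_before[len(sorted_after):]:
    print('removed:', leftover)
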
+ sorted_before = sorted(before.columns, + key=lambda c: c._comparable_id) + sorted_after = sorted(self.columns, key=lambda c: c._comparable_id) + + # handle the column diffs: + for a, b in zip(sorted_after, sorted_before): + if a._comparable_id == b._comparable_id: + continue + insert_column_diff(a, b) + + # check which columns are 'left over' and remove them + removed = [c._to_dict() for c in sorted_before[len(sorted_after):]] + upgrade.setdefault('remove_columns', []).extend(removed) + downgrade.setdefault('add_columns', []).extend(removed) + + return SchemaDiff(self, upgrade, downgrade) + + +async def _table_creator(tables, *, verbose=True): + for table in tables: + try: + await table.create(verbose=verbose) + except: + log.error('Failed to create table %s.', table.__tablename__) + + +def create_tables(*tables, verbose=True, loop=None): + if loop is None: + loop = asyncio.get_event_loop() + + loop.create_task(_table_creator(tables, verbose=verbose)) diff --git a/cogs/utils/formats.py b/cogs/utils/formats.py deleted file mode 100755 index d483dac..0000000 --- a/cogs/utils/formats.py +++ /dev/null @@ -1,75 +0,0 @@ -async def entry_to_code(bot, entries): - width = max(map(lambda t: len(t[0]), entries)) - output = ['```'] - fmt = '{0:<{width}}: {1}' - for name, entry in entries: - output.append(fmt.format(name, entry, width=width)) - output.append('```') - await bot.say('\n'.join(output)) - -import datetime - -async def indented_entry_to_code(bot, entries): - width = max(map(lambda t: len(t[0]), entries)) - output = ['```'] - fmt = '\u200b{0:>{width}}: {1}' - for name, entry in entries: - output.append(fmt.format(name, entry, width=width)) - output.append('```') - await bot.say('\n'.join(output)) - -async def too_many_matches(bot, msg, matches, entry): - check = lambda m: m.content.isdigit() - await bot.say('There are too many matches... Which one did you mean? **Only say the number**.') - await bot.say('\n'.join(map(entry, enumerate(matches, 1)))) - - # only give them 3 tries. - for i in range(3): - message = await bot.wait_for_message(author=msg.author, channel=msg.channel, check=check) - index = int(message.content) - try: - return matches[index - 1] - except: - await bot.say('Please give me a valid number. {} tries remaining...'.format(2 - i)) - - raise ValueError('Too many tries. 
Goodbye.') - -class Plural: - def __init__(self, **attr): - iterator = attr.items() - self.name, self.value = next(iter(iterator)) - - def __str__(self): - v = self.value - if v > 1: - return '%s %ss' % (v, self.name) - return '%s %s' % (v, self.name) - -def human_timedelta(dt): - now = datetime.datetime.utcnow() - delta = now - dt - hours, remainder = divmod(int(delta.total_seconds()), 3600) - minutes, seconds = divmod(remainder, 60) - days, hours = divmod(hours, 24) - years, days = divmod(days, 365) - - if years: - if days: - return '%s and %s ago' % (Plural(year=years), Plural(day=days)) - return '%s ago' % Plural(year=years) - - if days: - if hours: - return '%s and %s ago' % (Plural(day=days), Plural(hour=hours)) - return '%s ago' % Plural(day=days) - - if hours: - if minutes: - return '%s and %s ago' % (Plural(hour=hours), Plural(minute=minutes)) - return '%s ago' % Plural(hour=hours) - - if minutes: - if seconds: - return '%s and %s ago' % (Plural(minute=minutes), Plural(second=seconds)) - return '%s ago' % Plural(minute=minutes) - return '%s ago' % Plural(second=seconds) diff --git a/cogs/utils/lang.py b/cogs/utils/lang.py new file mode 100644 index 0000000..db41736 --- /dev/null +++ b/cogs/utils/lang.py @@ -0,0 +1,8 @@ +import gettext +import config + +lang = gettext.translation('base', localedir='locales', + languages=[config.lang]) +lang.install() + +_ = lang.gettext diff --git a/cogs/utils/maps.py b/cogs/utils/maps.py deleted file mode 100755 index c062452..0000000 --- a/cogs/utils/maps.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/bin/env python - -# With credit to DanielKO - -from lxml import etree -import datetime, re -import asyncio, aiohttp - -NINTENDO_LOGIN_PAGE = "https://id.nintendo.net/oauth/authorize" -SPLATNET_CALLBACK_URL = "https://splatoon.nintendo.net/users/auth/nintendo/callback" -SPLATNET_CLIENT_ID = "12af3d0a3a1f441eb900411bb50a835a" -SPLATNET_SCHEDULE_URL = "https://splatoon.nintendo.net/schedule" - -class Rotation(object): - def __init__(self): - self.start = None - self.end = None - self.turf_maps = [] - self.ranked_mode = None - self.ranked_maps = [] - - - @property - def is_over(self): - return self.end < datetime.datetime.utcnow() - - def __str__(self): - now = datetime.datetime.utcnow() - prefix = '' - if self.start > now: - minutes_delta = int((self.start - now) / datetime.timedelta(minutes=1)) - hours = int(minutes_delta / 60) - minutes = minutes_delta % 60 - prefix = '**In {0} hours and {1} minutes**:\n'.format(hours, minutes) - else: - prefix = '**Current Rotation**:\n' - - fmt = 'Turf War is {0[0]} and {0[1]}\n{1} is {2[0]} and {2[1]}' - return prefix + fmt.format(self.turf_maps, self.ranked_mode, self.ranked_maps) - -# based on https://github.com/Wiwiweb/SakuraiBot/blob/master/src/sakuraibot.py -async def get_new_splatnet_cookie(username, password): - parameters = {'client_id': SPLATNET_CLIENT_ID, - 'response_type': 'code', - 'redirect_uri': SPLATNET_CALLBACK_URL, - 'username': username, - 'password': password} - - async with aiohttp.post(NINTENDO_LOGIN_PAGE, data=parameters) as response: - cookie = response.history[-1].cookies.get('_wag_session') - if cookie is None: - print(req) - raise Exception("Couldn't retrieve cookie") - return cookie - -def parse_splatnet_time(timestr): - # time is given as "MM/DD at H:MM [p|a].m. (PDT|PST)" - # there is a case where it goes over the year, e.g. 12/31 at ... and then 1/1 at ... - # this case is kind of weird though and is currently unexpected - # it could even end up being e.g. 12/31/2015 ... and then 1/1/2016 ... 
-    # we'll never know
-
-    regex = r'(?P<month>\d+)\/(?P<day>\d+)\s*at\s*(?P<hour>\d+)\:(?P<minutes>\d+)\s*(?P<p>a\.m\.|p\.m\.)\s*\((?P<tz>.+)\)'
-    m = re.match(regex, timestr.strip())
-
-    if m is None:
-        raise RuntimeError('Apparently the timestamp "{}" does not match the regex.'.format(timestr))
-
-    matches = m.groupdict()
-    tz = matches['tz'].strip().upper()
-    offset = None
-    if tz == 'PDT':
-        # EDT is UTC - 4, PDT is UTC - 7, so you need +7 to make it UTC
-        offset = +7
-    elif tz == 'PST':
-        # EST is UTC - 5, PST is UTC - 8, so you need +8 to make it UTC
-        offset = +8
-    else:
-        raise RuntimeError('Unknown timezone found: {}'.format(tz))
-
-    pm = matches['p'].replace('.', '') # a.m. -> am
-
-    current_time = datetime.datetime.utcnow()
-
-    # Kind of hacky.
-    fmt = "{2}/{0[month]}/{0[day]} {0[hour]}:{0[minutes]} {1}".format(matches, pm, current_time.year)
-    splatoon_time = datetime.datetime.strptime(fmt, '%Y/%m/%d %I:%M %p') + datetime.timedelta(hours=offset)
-
-    # check for new year
-    if current_time.month == 12 and splatoon_time.month == 1:
-        splatoon_time.replace(current_time.year + 1)
-
-    return splatoon_time
-
-
-async def get_splatnet_schedule(splatnet_cookie):
-    cookies = {'_wag_session': splatnet_cookie}
-
-
-    """
-    This is repeated 3 times:
-
-    <div class="stage-schedule"> ... </div> <--- figure out how to parse this
-    <div class="stage-list"> <--- turf war
-        <span class="map-name"> ... </span> <span class="map-name"> ... </span>
-        <span class="map-name"> ... </span> <span class="map-name"> ... </span>
-    </div>
-    <div class="stage-list"> <--- ranked
-        <span class="rule-description"> ... </span> <--- Splat Zones, Rainmaker, Tower Control
-        <span class="map-name"> ... </span> <span class="map-name"> ... </span>
-        <span class="map-name"> ... </span> <span class="map-name"> ... </span>
-    </div>
- """ - - schedule = [] - async with aiohttp.get(SPLATNET_SCHEDULE_URL, cookies=cookies, data={'locale':"en"}) as response: - text = await response.text() - root = etree.fromstring(text, etree.HTMLParser()) - stage_schedule_nodes = root.xpath("//*[@class='stage-schedule']") - stage_list_nodes = root.xpath("//*[@class='stage-list']") - - if len(stage_schedule_nodes)*2 != len(stage_list_nodes): - raise RuntimeError("SplatNet changed, need to update the parsing!") - - for sched_node in stage_schedule_nodes: - r = Rotation() - - start_time, end_time = sched_node.text.split("~") - r.start = parse_splatnet_time(start_time) - r.end = parse_splatnet_time(end_time) - - tw_list_node = stage_list_nodes.pop(0) - r.turf_maps = tw_list_node.xpath(".//*[@class='map-name']/text()") - - ranked_list_node = stage_list_nodes.pop(0) - r.ranked_maps = ranked_list_node.xpath(".//*[@class='map-name']/text()") - r.ranked_mode = ranked_list_node.xpath(".//*[@class='rule-description']/text()")[0] - - schedule.append(r) - - return schedule diff --git a/cogs/utils/menu.py b/cogs/utils/menu.py deleted file mode 100755 index ea2c809..0000000 --- a/cogs/utils/menu.py +++ /dev/null @@ -1,140 +0,0 @@ -import asyncio - -class Menu: - """An interactive menu class for Discord.""" - - - class Submenu: - """A metaclass of the Menu class.""" - def __init__(self, name, content): - self.content = content - self.leads_to = [] - self.name = name - - def get_text(self): - text = "" - for idx, menu in enumerate(self.leads_to): - text += "[{}] {}\n".format(idx+1, menu.name) - return text - - def get_child(self, child_idx): - try: - return self.leads_to[child_idx] - except IndexError: - raise IndexError("child index out of range") - - def add_child(self, child): - self.leads_to.append(child) - - class InputSubmenu: - """A metaclass of the Menu class for submenu options that take input, instead of prompting the user to pick an option.""" - def __init__(self, name, content, input_function, leads_to): - self.content = content - self.name = name - self.input_function = input_function - self.leads_to = leads_to - - def next_child(self): - return self.leads_to - - class ChoiceSubmenu: - """A metaclass of the Menu class for submenu options for choosing an option from a list.""" - def __init__(self, name, content, options, input_function, leads_to): - self.content = content - self.name = name - self.options = options - self.input_function = input_function - self.leads_to = leads_to - - def next_child(self): - return self.leads_to - - - def __init__(self, main_page): - self.children = [] - self.main = self.Submenu("main", main_page) - - def add_child(self, child): - self.main.add_child(child) - - async def start(self, ctx): - current = self.main - menu_msg = None - while True: - output = "" - - if type(current) == self.Submenu: - if type(current.content) == str: - output += current.content + "\n" - elif callable(current.content): - current.content() - else: - raise TypeError("submenu body is not a str or function") - - if not current.leads_to: - if not menu_msg: - menu_msg = await ctx.send("```" + output + "```") - else: - await menu_msg.edit(content="```" + output + "```") - break - - output += "\n" + current.get_text() + "\n" - output += "Enter a number." 
- - if not menu_msg: - menu_msg = await ctx.send("```" + output + "```") - else: - await menu_msg.edit(content="```" + output + "```") - - reply = await ctx.bot.wait_for("message", check=lambda m: m.author == ctx.bot.user and m.content.isdigit() and m.channel == ctx.message.channel) - await reply.delete() - - try: - current = current.get_child(int(reply.content) - 1) - except IndexError: - print("Invalid number.") - break - - elif type(current) == self.InputSubmenu: - if type(current.content) == list: - answers = [] - for question in current.content: - await menu_msg.edit(content="```" + question + "\n\nEnter a value." + "```") - reply = await ctx.bot.wait_for("message", check=lambda m: m.author == ctx.bot.user and m.channel == ctx.message.channel) - await reply.delete() - answers.append(reply) - current.input_function(*answers) - else: - await menu_msg.edit(content="```" + current.content + "\n\nEnter a value." + "```") - reply = await ctx.bot.wait_for("message", check=lambda m: m.author == ctx.bot.user and m.channel == ctx.message.channel) - await reply.delete() - current.input_function(reply) - - if not current.leads_to: - break - - current = current.leads_to - - elif type(current) == self.ChoiceSubmenu: - result = "```" + current.content + "\n\n" - if type(current.options) == dict: - indexes = {} - for idx, option in enumerate(current.options): - result += "[{}] {}: {}\n".format(idx+1, option, current.options[option]) - indexes[idx] = option - else: - for idx, option in current.options: - result += "[{}] {}\n".format(idx+1, option) - await menu_msg.edit(content=result + "\nPick an option.```") - reply = await ctx.bot.wait_for("message", check=lambda m: m.author == ctx.bot.user and m.content.isdigit() and m.channel == ctx.message.channel) - await reply.delete() - if type(current.options) == dict: - current.input_function(reply, indexes[int(reply.content)-1]) - else: - current.input_function(reply, current.options[reply-1]) - - if not current.leads_to: - break - - current = current.leads_to - \ No newline at end of file diff --git a/cogs/utils/paginator.py b/cogs/utils/paginator.py deleted file mode 100755 index 8947bad..0000000 --- a/cogs/utils/paginator.py +++ /dev/null @@ -1,503 +0,0 @@ -# Help paginator by Rapptz -# Edited by F4stZ4p - -import asyncio -import discord - -class CannotPaginate(Exception): - pass - -class Pages: - """Implements a paginator that queries the user for the - pagination interface. - Pages are 1-index based, not 0-index based. - If the user does not reply within 2 minutes then the pagination - interface exits automatically. - Parameters - ------------ - ctx: Context - The context of the command. - entries: List[str] - A list of entries to paginate. - per_page: int - How many entries show up per page. - show_entry_count: bool - Whether to show an entry count in the footer. - Attributes - ----------- - embed: discord.Embed - The embed object that is being used to send pagination info. - Feel free to modify this externally. Only the description, - footer fields, and colour are internally modified. - permissions: discord.Permissions - Our permissions for the channel. 
- """ - def __init__(self, ctx, *, entries, per_page=12, show_entry_count=True): - self.bot = ctx.bot - self.entries = entries - self.message = ctx.message - self.channel = ctx.channel - self.author = ctx.author - self.per_page = per_page - pages, left_over = divmod(len(self.entries), self.per_page) - if left_over: - pages += 1 - self.maximum_pages = pages - self.embed = discord.Embed(colour=discord.Color.green()) - self.paginating = len(entries) > per_page - self.show_entry_count = show_entry_count - self.reaction_emojis = [ - ('\N{BLACK LEFT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}', self.first_page), - ('\N{BLACK LEFT-POINTING TRIANGLE}', self.previous_page), - ('\N{BLACK RIGHT-POINTING TRIANGLE}', self.next_page), - ('\N{BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}', self.last_page), - ('\N{INPUT SYMBOL FOR NUMBERS}', self.numbered_page ), - ('\N{BLACK SQUARE FOR STOP}', self.stop_pages), - ('\N{INFORMATION SOURCE}', self.show_help), - ] - - if ctx.guild is not None: - self.permissions = self.channel.permissions_for(ctx.guild.me) - else: - self.permissions = self.channel.permissions_for(ctx.bot.user) - - if not self.permissions.embed_links: - raise CannotPaginate('Bot does not have embed links permission.') - - if not self.permissions.send_messages: - raise CannotPaginate('Bot cannot send messages.') - - if self.paginating: - # verify we can actually use the pagination session - if not self.permissions.add_reactions: - raise CannotPaginate('Bot does not have add reactions permission.') - - if not self.permissions.read_message_history: - raise CannotPaginate('Bot does not have Read Message History permission.') - - def get_page(self, page): - base = (page - 1) * self.per_page - return self.entries[base:base + self.per_page] - - async def show_page(self, page, *, first=False): - self.current_page = page - entries = self.get_page(page) - p = [] - for index, entry in enumerate(entries, 1 + ((page - 1) * self.per_page)): - p.append(f'{index}. {entry}') - - if self.maximum_pages > 1: - if self.show_entry_count: - text = f'Page {page}/{self.maximum_pages} ({len(self.entries)} entries)' - else: - text = f'Page {page}/{self.maximum_pages}' - - self.embed.set_footer(text=text) - - if not self.paginating: - self.embed.description = '\n'.join(p) - return await self.channel.send(embed=self.embed) - - if not first: - self.embed.description = '\n'.join(p) - await self.message.edit(embed=self.embed) - return - - p.append('') - p.append('Confused? 
React with \N{INFORMATION SOURCE} for more info.') - self.embed.description = '\n'.join(p) - self.message = await self.channel.send(embed=self.embed) - for (reaction, _) in self.reaction_emojis: - if self.maximum_pages == 2 and reaction in ('\u23ed', '\u23ee'): - # no |<< or >>| buttons if we only have two pages - # we can't forbid it if someone ends up using it but remove - # it from the default set - continue - - await self.message.add_reaction(reaction) - - async def checked_show_page(self, page): - if page != 0 and page <= self.maximum_pages: - await self.show_page(page) - - async def first_page(self): - """goes to the first page""" - await self.show_page(1) - - async def last_page(self): - """goes to the last page""" - await self.show_page(self.maximum_pages) - - async def next_page(self): - """goes to the next page""" - await self.checked_show_page(self.current_page + 1) - - async def previous_page(self): - """goes to the previous page""" - await self.checked_show_page(self.current_page - 1) - - async def show_current_page(self): - if self.paginating: - await self.show_page(self.current_page) - - async def numbered_page(self): - """lets you type a page number to go to""" - to_delete = [] - to_delete.append(await self.channel.send('What page do you want to go to?')) - - def message_check(m): - return m.author == self.author and \ - self.channel == m.channel and \ - m.content.isdigit() - - try: - msg = await self.bot.wait_for('message', check=message_check, timeout=30.0) - except asyncio.TimeoutError: - to_delete.append(await self.channel.send('Took too long.')) - await asyncio.sleep(5) - else: - page = int(msg.content) - to_delete.append(msg) - if page != 0 and page <= self.maximum_pages: - await self.show_page(page) - else: - to_delete.append(await self.channel.send(f'Invalid page given. ({page}/{self.maximum_pages})')) - await asyncio.sleep(5) - - try: - await self.channel.delete_messages(to_delete) - except Exception: - pass - - async def show_help(self): - """shows this message""" - messages = ['Welcome to the interactive paginator!\n'] - messages.append('This interactively allows you to see pages of text by navigating with ' \ - 'reactions. 
They are as follows:\n')
-
-        for (emoji, func) in self.reaction_emojis:
-            messages.append(f'{emoji} {func.__doc__}')
-
-        self.embed.description = '\n'.join(messages)
-        self.embed.clear_fields()
-        self.embed.set_footer(text=f'We were on page {self.current_page} before this message.')
-        await self.message.edit(embed=self.embed)
-
-        async def go_back_to_current_page():
-            await asyncio.sleep(60.0)
-            await self.show_current_page()
-
-        self.bot.loop.create_task(go_back_to_current_page())
-
-    async def stop_pages(self):
-        """stops the interactive pagination session"""
-        await self.message.delete()
-        self.paginating = False
-
-    def react_check(self, reaction, user):
-        if user is None or user.id != self.author.id:
-            return False
-
-        if reaction.message.id != self.message.id:
-            return False
-
-        for (emoji, func) in self.reaction_emojis:
-            if reaction.emoji == emoji:
-                self.match = func
-                return True
-        return False
-
-    async def paginate(self):
-        """Actually paginate the entries and run the interactive loop if necessary."""
-        first_page = self.show_page(1, first=True)
-        if not self.paginating:
-            await first_page
-        else:
-            # allow us to react to reactions right away if we're paginating
-            self.bot.loop.create_task(first_page)
-
-        while self.paginating:
-            try:
-                reaction, user = await self.bot.wait_for('reaction_add', check=self.react_check, timeout=120.0)
-            except asyncio.TimeoutError:
-                self.paginating = False
-                try:
-                    await self.message.clear_reactions()
-                except:
-                    pass
-                finally:
-                    break
-
-            try:
-                await self.message.remove_reaction(reaction, user)
-            except:
-                pass # can't remove it so don't bother doing so
-
-            await self.match()
-
-class FieldPages(Pages):
-    """Similar to Pages except entries should be a list of
-    tuples having (key, value) to show as embed fields instead.
-    """
-    async def show_page(self, page, *, first=False):
-        self.current_page = page
-        entries = self.get_page(page)
-
-        self.embed.clear_fields()
-        self.embed.description = discord.Embed.Empty
-
-        for key, value in entries:
-            self.embed.add_field(name=key, value=value, inline=False)
-
-        if self.maximum_pages > 1:
-            if self.show_entry_count:
-                text = f'Page {page}/{self.maximum_pages} ({len(self.entries)} entries)'
-            else:
-                text = f'Page {page}/{self.maximum_pages}'
-
-            self.embed.set_footer(text=text)
-
-        if not self.paginating:
-            return await self.channel.send(embed=self.embed)
-
-        if not first:
-            await self.message.edit(embed=self.embed)
-            return
-
-        self.message = await self.channel.send(embed=self.embed)
-        for (reaction, _) in self.reaction_emojis:
-            if self.maximum_pages == 2 and reaction in ('\u23ed', '\u23ee'):
-                # no |<< or >>| buttons if we only have two pages
-                # we can't forbid it if someone ends up using it but remove
-                # it from the default set
-                continue
-
-            await self.message.add_reaction(reaction)
-
-import itertools
-import inspect
-import re
-
-# ?help
-# ?help Cog
-# ?help command
-#   -> could be a subcommand
-
-_mention = re.compile(r'<@\!?([0-9]{1,19})>')
-
-def cleanup_prefix(bot, prefix):
-    m = _mention.match(prefix)
-    if m:
-        user = bot.get_user(int(m.group(1)))
-        if user:
-            return f'@{user.name} '
-    return prefix
-
-async def _can_run(cmd, ctx):
-    try:
-        return await cmd.can_run(ctx)
-    except:
-        return False
-
-def _command_signature(cmd):
-    # this is modified from discord.py source
-    # which I wrote myself lmao
-
-    result = [cmd.qualified_name]
-    if cmd.usage:
-        result.append(cmd.usage)
-        return ' '.join(result)
-
-    params = cmd.clean_params
-    if not params:
-        return ' '.join(result)
-
-    for name, param in params.items():
-        if param.default is not param.empty:
-            # We don't want None or '' to trigger the [name=value] case and instead it should
-            # do [name] since [name=None] or [name=] are not exactly useful for the user.
-            should_print = param.default if isinstance(param.default, str) else param.default is not None
-            if should_print:
-                result.append(f'[{name}={param.default!r}]')
-            else:
-                result.append(f'[{name}]')
-        elif param.kind == param.VAR_POSITIONAL:
-            result.append(f'[{name}...]')
-        else:
-            result.append(f'<{name}>')
-
-    return ' '.join(result)
-
-class HelpPaginator(Pages):
-    def __init__(self, ctx, entries, *, per_page=4):
-        super().__init__(ctx, entries=entries, per_page=per_page)
-        self.reaction_emojis.append(('\N{WHITE QUESTION MARK ORNAMENT}', self.show_bot_help))
-        self.total = len(entries)
-
-    @classmethod
-    async def from_cog(cls, ctx, cog):
-        cog_name = cog.__class__.__name__
-
-        # get the commands
-        entries = sorted(ctx.bot.get_cog(cog_name).get_commands(), key=lambda c: c.name)
-
-        # remove the ones we can't run
-        entries = [cmd for cmd in entries if (await _can_run(cmd, ctx)) and not cmd.hidden]
-
-        self = cls(ctx, entries)
-        self.title = f'{cog_name} Commands'
-        self.description = inspect.getdoc(cog)
-        self.prefix = cleanup_prefix(ctx.bot, ctx.prefix)
-
-        return self
-
-    @classmethod
-    async def from_command(cls, ctx, command):
-        try:
-            entries = sorted(command.commands, key=lambda c: c.name)
-        except AttributeError:
-            entries = []
-        else:
-            entries = [cmd for cmd in entries if (await _can_run(cmd, ctx)) and not cmd.hidden]
-
-        self = cls(ctx, entries)
-        self.title = command.signature
-
-        if command.description:
-            self.description = f'{command.description}\n\n{command.help}'
-        else:
-            self.description = command.help or 'No help given.'
-
-        self.prefix = cleanup_prefix(ctx.bot, ctx.prefix)
-        return self
-
-    @classmethod
-    async def from_bot(cls, ctx):
-        def key(c):
-            return c.cog_name or '\u200bMisc'
-
-        entries = sorted(ctx.bot.commands, key=key)
-        nested_pages = []
-        per_page = 9
-
-        # 0: (cog, desc, commands) (max len == 9)
-        # 1: (cog, desc, commands) (max len == 9)
-        # ...
-
-        for cog, commands in itertools.groupby(entries, key=key):
-            plausible = [cmd for cmd in commands if (await _can_run(cmd, ctx)) and not cmd.hidden]
-            if len(plausible) == 0:
-                continue
-
-            description = ctx.bot.get_cog(cog)
-            if description is None:
-                description = discord.Embed.Empty
-            else:
-                description = inspect.getdoc(description) or discord.Embed.Empty
-
-            nested_pages.extend((cog, description, plausible[i:i + per_page]) for i in range(0, len(plausible), per_page))
-
-        self = cls(ctx, nested_pages, per_page=1) # this forces the pagination session
-        self.prefix = cleanup_prefix(ctx.bot, ctx.prefix)
-
-        # swap the get_page implementation with one that supports our style of pagination
-        self.get_page = self.get_bot_page
-        self._is_bot = True
-
-        # replace the actual total
-        self.total = sum(len(o) for _, _, o in nested_pages)
-        return self
-
-    def get_bot_page(self, page):
-        cog, description, commands = self.entries[page - 1]
-        self.title = f'{cog} Commands'
-        self.description = description
-        return commands
-
-    async def show_page(self, page, *, first=False):
-        self.current_page = page
-        entries = self.get_page(page)
-
-        self.embed.clear_fields()
-        self.embed.description = self.description
-        self.embed.title = self.title
-
-        if hasattr(self, '_is_bot'):
-            value = 'Check the bot source: **[GitHub Link](https://github.com/F4stZ4p/DJ5n4k3/)**'
-            self.embed.add_field(name='**GitHub**', value=value, inline=False)
-
-        self.embed.set_footer(text=f'Use "{self.prefix}help command" for more info on a command.')
-
-        signature = _command_signature
-
-        for entry in entries:
-            self.embed.add_field(name=signature(entry), value=entry.short_doc or "No help given", inline=False)
-
-        if self.maximum_pages:
-            self.embed.set_author(name=f'Page {page}/{self.maximum_pages} ({self.total} commands)')
-
-        if not self.paginating:
-            return await self.channel.send(embed=self.embed)
-
-        if not first:
-            await self.message.edit(embed=self.embed)
-            return
-
-        self.message = await self.channel.send(embed=self.embed)
-        for (reaction, _) in self.reaction_emojis:
-            if self.maximum_pages == 2 and reaction in ('\u23ed', '\u23ee'):
-                # no |<< or >>| buttons if we only have two pages
-                # we can't forbid it if someone ends up using it but remove
-                # it from the default set
-                continue
-
-            await self.message.add_reaction(reaction)
-
-    async def show_help(self):
-        """shows this message"""
-
-        self.embed.title = 'Paginator help'
-        self.embed.description = 'Hello! Welcome to the help page.'
-
-        messages = [f'{emoji} {func.__doc__}' for emoji, func in self.reaction_emojis]
-        self.embed.clear_fields()
-        self.embed.add_field(name='What are these reactions for?', value='\n'.join(messages), inline=False)
-
-        self.embed.set_footer(text=f'We were on page {self.current_page} before this message.')
-        await self.message.edit(embed=self.embed)
-
-        async def go_back_to_current_page():
-            await asyncio.sleep(30.0)
-            await self.show_current_page()
-
-        self.bot.loop.create_task(go_back_to_current_page())
-
-    async def show_bot_help(self):
-        """shows how to use the bot"""
-
-        self.embed.title = 'Using the bot'
-        self.embed.description = 'Hello! Welcome to the help page.'
-        self.embed.clear_fields()
-
-        entries = (
-            ('<argument>', 'This means the argument is __**required**__.'),
-            ('[argument]', 'This means the argument is __**optional**__.'),
-            ('[A|B]', 'This means that it can be __**either A or B**__.'),
-            ('[argument...]', 'This means you can have multiple arguments.\n' \
-                              'Now that you know the basics, it should be noted that...\n' \
-                              '__**You do not type in the brackets!**__')
-        )
-
-        self.embed.add_field(name='How do I use this bot?', value='Reading the bot signature is pretty simple.')
-
-        for name, value in entries:
-            self.embed.add_field(name=name, value=value, inline=False)
-
-        self.embed.set_footer(text=f'We were on page {self.current_page} before this message.')
-        await self.message.edit(embed=self.embed)
-
-        async def go_back_to_current_page():
-            await asyncio.sleep(30.0)
-            await self.show_current_page()
-
-        self.bot.loop.create_task(go_back_to_current_page())
\ No newline at end of file
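The reaction loop deleted above is the standard discord.py `Client.wait_for` pattern. A minimal self-contained sketch of the same idea, assuming an existing `bot`, a sent `message`, and an `author` to restrict input to (the helper name is hypothetical):

    import asyncio

    async def wait_for_nav(bot, message, author):
        # the check callback filters reaction events exactly like react_check did
        def check(reaction, user):
            return user.id == author.id and reaction.message.id == message.id

        try:
            # wait_for('reaction_add') resolves to a (reaction, user) pair
            reaction, user = await bot.wait_for('reaction_add', check=check, timeout=120.0)
        except asyncio.TimeoutError:
            return None  # session expired; mirrors the paginator's cleanup path
        return reaction.emoji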
diff --git a/first_run/__init__.py b/first_run/__init__.py
index 669c547..e90bbc3 100644
--- a/first_run/__init__.py
+++ b/first_run/__init__.py
@@ -1,5 +1,10 @@
-from .config import Config
+from .initializer import Config
 
 setup = Config()
+
+setup.install()
+
 setup.ask()
 setup.save()
+
+setup.clean()
\ No newline at end of file
diff --git a/first_run/config_generator.py b/first_run/initializer.py
similarity index 63%
rename from first_run/config_generator.py
rename to first_run/initializer.py
index 506c176..7ebd976 100644
--- a/first_run/config_generator.py
+++ b/first_run/initializer.py
@@ -1,3 +1,6 @@
+from pip._internal import main as pip
+import shutil
+
 from .langs import locales, texts
 
 
@@ -10,6 +13,9 @@ class Config:
             'unkickable_id': '[unkickable ids here (in int)]'
         }
 
+        with open('requirements.txt', 'r') as f:
+            self.packages = f.read().splitlines()
+
     def input(self, key, **kwargs):
         lang = self.config.get('lang', 'multiple')
 
@@ -32,13 +38,30 @@ class Config:
 
         self.config[key] = response
 
-    def ask(self):
+    def install(self):
         self.input('lang', valid=locales)
 
+        print('\n\n\033[4;36m' +
+              texts.get(self.config.get('lang')).get('install') +
+              '\033[0m\n')
+
+        for package in self.packages:
+            pip(['install', package])
+
+    def ask(self):
+        print('\n\n\033[4;36m' + texts.get(self.config.get('lang')).get('conf')
+              + '\033[0m\n')
+
         self.input('token', empty=False)
         self.input('postgresql_username', empty=False)
         self.input('postgresql_password', empty=False)
         self.input('postgresql_dbname', empty=False)
 
+        print('\n\n\033[4;36m' + texts.get(self.config.get('lang')).get('logs')
+              + '\033[0m\n')
+
+        self.input('wh_id', empty=True)
+        self.input('wh_token', empty=True)
+
         print('\n\n\033[4;36m' + texts.get(self.config.get('lang')).get('misc')
               + '\033[0m\n')
 
@@ -49,13 +72,24 @@ class Config:
         with open('config.py', 'w') as file:
             postgresql = f"postgresql://" \
                          f"{self.config.get('postgresql_username')}:" \
-                         f"{self.config.get('postgresql_password')}@host/" \
-                         f"{self.config.get('postgresql_dbname')}"
+                         f"{self.config.get('postgresql_password')}" \
+                         f"@localhost/{self.config.get('postgresql_dbname')}"
 
             file.write(f"postgresql = '{postgresql}'\n")
 
+            wh_id = self.config.get('wh_id')
+            logs_webhook = dict(id=int(wh_id) if wh_id else None,
+                                token=self.config.get('wh_token'))
+            file.write(f"logs_webhook = {logs_webhook}\n")
+
             for key, value in self.config.items():
-                if not key.startswith('postgresql_'):
+                if not key.startswith('postgresql_') \
+                        and not key.startswith('wh_'):
                     value = f"'{value}'" if type(value) is str else value
                     file.write(f"{key} = {value}\n")
 
         print('\n\n\033[4;36m' +
               texts.get(self.config.get('lang')).get('end') +
               '\033[0m\n')
+
+    def clean(self):
+        print('\n\n\033[4;36m' +
+              texts.get(self.config.get('lang')).get('clean') +
+              '\033[0m\n')
+        shutil.rmtree('first_run')
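`pip._internal` is not a public API: its `main` entry point has moved across pip releases, so the `install()` step above can break after a pip upgrade. A sturdier sketch of the same step using only the standard library (helper name hypothetical, same package list assumed):

    import subprocess
    import sys

    def install_packages(packages):
        for package in packages:
            # calling pip through the current interpreter avoids importing
            # pip's private internals, which are not a stable API
            subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])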
diff --git a/first_run/langs.py b/first_run/langs.py
index 4d1ae7b..3048fcd 100644
--- a/first_run/langs.py
+++ b/first_run/langs.py
@@ -1,6 +1,10 @@
 locales = ['fr', 'en']
 
 texts = {
     'fr': {
+        'install': "Installation des modules...",
+
+        'conf': "Configuration...",
+
         'token': "Veuillez entrer le token",
 
         'not_empty': "Cette valeur ne doit pas être vide",
@@ -8,15 +12,26 @@ texts = {
         'postgresql_password': "Veuillez entrer le mot de passe de postgresql",
         'postgresql_dbname': "Veuillez entrer le nom de la base de donnée",
 
+        'logs': "Channel de logs (non obligatoire)",
+
+        'wh_id': "L'id du webhook pour le channel de logs",
+        'wh_token': "Le token du webhook pour le channel de logs",
+
         'misc': 'Autre',
 
         'activity': "Joue à ...",
         'prefix': "Prefixe (par defaut : @tuxbot)",
 
-        "end": "Configuration terminée, vous pouvez à tout moment la rectifier en modifiant le fichier config.py"
+        'end': "Configuration terminée, vous pouvez à tout moment la rectifier en modifiant le fichier config.py",
+
+        'clean': "Nettoyage..."
     },
 
     'en': {
+        'install': "Installation of the modules...",
+
+        'conf': "Configuration...",
+
         'token': "Please enter the token",
 
         'not_empty': "This value must not be empty",
@@ -24,12 +39,19 @@ texts = {
         'postgresql_password': "Please enter the postgresql password",
         'postgresql_dbname': "Please enter the database name",
 
+        'logs': "Log channel (not required)",
+
+        'wh_id': "Webhook id for log channel",
+        'wh_token': "Webhook token for log channel",
+
         'misc': 'Misc',
 
         'activity': "Playing ...",
         'prefix': "Prefix (default is @tuxbot)",
 
-        "end": "Configuration completed, you can fix it at any time by modifying the config.py file"
+        'end': "Configuration completed, you can fix it at any time by modifying the config.py file",
+
+        'clean': "Cleaning..."
     },
 
     'multiple': {
diff --git a/launcher.py b/launcher.py
index 8cc5584..48f4542 100644
--- a/launcher.py
+++ b/launcher.py
@@ -1,9 +1,17 @@
-import logging
+import asyncio
 import contextlib
+import logging
+import socket
+import sys
+
+import click
+
+from bot import TuxBot
+from cogs.utils.db import Table
 
 try:
     import config
+    from cogs.utils.lang import _
 except ModuleNotFoundError:
     import first_run
@@ -34,10 +42,34 @@ def setup_logging():
             log.removeHandler(hdlr)
 
 
-def run_bot():
-    pass # Todo: initialize bot, postgresql,...
+def run_bot(unload):
+    loop = asyncio.get_event_loop()
+    log = logging.getLogger()
+
+    try:
+        pool = loop.run_until_complete(
+            Table.create_pool(config.postgresql, command_timeout=60)
+        )
+    except socket.gaierror:
+        click.echo(_('Could not set up PostgreSQL...'), file=sys.stderr)
+        log.exception(_('Could not set up PostgreSQL...'))
+        return
+
+    bot = TuxBot(unload)
+    bot.pool = pool
+    bot.run()
+
+
+@click.group(invoke_without_command=True, options_metavar='[options]')
+@click.option('-u', '--unload',
+              multiple=True, type=str,
+              help=_('Launch without loading the module'))
+@click.pass_context
+def main(ctx, unload):
+    if ctx.invoked_subcommand is None:
+        with setup_logging():
+            run_bot(unload)
 
 
 if __name__ == '__main__':
-    with setup_logging():
-        run_bot()
+    main()
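With `multiple=True`, click collects every repeated `-u`/`--unload` occurrence into a tuple before calling `main`, which then hands it to `run_bot` and on to the bot's constructor. A stripped-down illustration (hypothetical `demo.py`, not part of the repo):

    import click

    @click.command()
    @click.option('-u', '--unload', multiple=True, type=str)
    def demo(unload):
        # `python demo.py -u cogs.admin -u cogs.basics` prints:
        #   skipping: cogs.admin, cogs.basics
        click.echo('skipping: ' + (', '.join(unload) or '(none)'))

    if __name__ == '__main__':
        demo()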
diff --git a/locales/en/LC_MESSAGES/base.mo b/locales/en/LC_MESSAGES/base.mo
new file mode 100644
index 0000000..e56e9c9
Binary files /dev/null and b/locales/en/LC_MESSAGES/base.mo differ
diff --git a/locales/en/LC_MESSAGES/base.po b/locales/en/LC_MESSAGES/base.po
new file mode 100644
index 0000000..ca46dcc
--- /dev/null
+++ b/locales/en/LC_MESSAGES/base.po
@@ -0,0 +1,45 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR ORGANIZATION
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2019-09-08 19:04+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+#: launcher.py:56 launcher.py:57
+msgid "Could not set up PostgreSQL..."
+msgstr ""
+
+#: launcher.py:68
+msgid "Launch without loading the module"
+msgstr ""
+
+#: bot.py:52
+msgid "Failed to load extension : "
+msgstr ""
+
+#: bot.py:59
+msgid "This command cannot be used in private messages."
+msgstr ""
+
+#: bot.py:63
+msgid "Sorry. This command is disabled and cannot be used."
+msgstr ""
+
+#: bot.py:66
+msgid "In "
+msgstr ""
+
+#: bot.py:77
+msgid "Ready:"
+msgstr ""
+
diff --git a/locales/fr/LC_MESSAGES/base.mo b/locales/fr/LC_MESSAGES/base.mo
new file mode 100644
index 0000000..a24b97a
Binary files /dev/null and b/locales/fr/LC_MESSAGES/base.mo differ
diff --git a/locales/fr/LC_MESSAGES/base.po b/locales/fr/LC_MESSAGES/base.po
new file mode 100644
index 0000000..9bbd996
--- /dev/null
+++ b/locales/fr/LC_MESSAGES/base.po
@@ -0,0 +1,45 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR ORGANIZATION
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2019-09-08 19:04+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+#: launcher.py:56 launcher.py:57
+msgid "Could not set up PostgreSQL..."
+msgstr "Impossible d'initialiser PostgreSQL..."
+
+#: launcher.py:68
+msgid "Launch without loading the module"
+msgstr "Lancer sans charger le module"
+
+#: bot.py:52
+msgid "Failed to load extension : "
+msgstr "Impossible de charger l'extension : "
+
+#: bot.py:59
+msgid "This command cannot be used in private messages."
+msgstr "Cette commande ne peut pas être utilisée en message privé."
+
+#: bot.py:63
+msgid "Sorry. This command is disabled and cannot be used."
+msgstr "Désolé, cette commande est désactivée et ne peut pas être utilisée."
+
+#: bot.py:66
+msgid "In "
+msgstr "Dans "
+
+#: bot.py:77
+msgid "Ready:"
+msgstr "Prêt:"
+
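The `.po` sources above are compiled into the checked-in `.mo` binaries (typically via `msgfmt`). At runtime, the lookup that turns `_('Ready:')` into `"Prêt:"` is usually wired up as below; the repo's actual `cogs.utils.lang` is not shown in this diff, so this is only a sketch of the standard gettext setup:

    import gettext

    # domain 'base' and localedir 'locales' match the files added above:
    # locales/<lang>/LC_MESSAGES/base.mo
    translation = gettext.translation('base', localedir='locales', languages=['fr'])
    translation.install()  # binds the catalog's gettext to the builtin _()

    print(_('Ready:'))  # -> 'Prêt:'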
diff --git a/logs/tuxbot.log b/logs/tuxbot.log
index e69de29..72e6060 100644
--- a/logs/tuxbot.log
+++ b/logs/tuxbot.log
@@ -0,0 +1,12 @@
+[2019-09-08 22:58:46] [INFO ] discord.client: logging in using static token
+[2019-09-08 22:58:47] [INFO ] discord.gateway: Shard ID 0 has sent the IDENTIFY payload.
+[2019-09-08 22:58:47] [INFO ] discord.gateway: Shard ID 0 has connected to Gateway: ["gateway-prd-main-xfx5",{"micros":31802,"calls":["discord-sessions-prd-1-18",{"micros":26858,"calls":["start_session",{"micros":12548,"calls":["api-prd-main-6rfw",{"micros":8098,"calls":["get_user",{"micros":1942},"add_authorized_ip",{"micros":5},"get_guilds",{"micros":3036},"coros_wait",{"micros":3}]}]},"guilds_connect",{"micros":139,"calls":[]},"presence_connect",{"micros":1,"calls":[]}]}]}] (Session ID: 03fcb2e35ce477c42ae58e20259b5d68).
+[2019-09-08 22:58:53] [INFO ] discord.state: Processed a chunk for 463 members in guild ID 280805240977227776.
+[2019-09-08 22:58:54] [INFO ] discord.state: Processed a chunk for 807 members in guild ID 331981755177238530.
+[2019-09-08 22:58:55] [INFO ] discord.state: Processed a chunk for 1000 members in guild ID 296698073177128962.
+[2019-09-08 22:58:55] [INFO ] discord.state: Processed a chunk for 1000 members in guild ID 296698073177128962.
+[2019-09-08 22:58:55] [INFO ] discord.state: Processed a chunk for 662 members in guild ID 296698073177128962.
+[2019-09-08 23:03:12] [INFO ] discord.client: Cleaning up tasks.
+[2019-09-08 23:03:12] [INFO ] discord.client: Cleaning up after 5 tasks.
+[2019-09-08 23:03:12] [INFO ] discord.client: All tasks finished cancelling.
+[2019-09-08 23:03:12] [INFO ] discord.client: Closing the event loop.
diff --git a/requirements.txt b/requirements.txt
index 68f77bd..c94599b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-discord.py
+discord.py[voice]
 lxml
 click
 asyncpg>=0.12.0
\ No newline at end of file
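The `[voice]` extra makes pip pull in discord.py's optional voice dependencies, notably PyNaCl. A quick post-install sanity check (a sketch, not part of the repo):

    import importlib.util

    # PyNaCl is imported under the name 'nacl'; discord.py needs it for voice
    if importlib.util.find_spec('nacl') is None:
        raise SystemExit('PyNaCl missing; reinstall with: pip install "discord.py[voice]"')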