import asyncio
import logging
import time
from datetime import datetime
from typing import Optional, Union

import aiohttp
import discord
from aiohttp import ClientConnectorError, InvalidURL, TCPConnector
from jishaku.models import copy_context_with
from discord.ext import commands, tasks
from ipinfo.exceptions import RequestQuotaExceededError
from structured_config import ConfigFile

from tuxbot.cogs.Network.functions.converters import (
    IPConverter,
    IPParamsConverter,
    DomainConverter,
    QueryTypeConverter,
    ASConverter,
)
from tuxbot.cogs.Network.functions.exceptions import (
    RFC18,
    InvalidIp,
    VersionNotFound,
    InvalidDomain,
    InvalidQueryType,
    InvalidAsn,
)
from tuxbot.core.bot import Tux
from tuxbot.core.i18n import (
    Translator,
)
from tuxbot.core.utils.data_manager import cogs_data_path
from tuxbot.core.utils.functions.extra import (
    ContextPlus,
    command_extra,
)
from tuxbot.core.utils.functions.utils import shorten, str_if_empty
from .config import NetworkConfig
from .functions.utils import (
    get_ip,
    get_hostname,
    get_crimeflare_result,
    get_ipinfo_result,
    get_ipwhois_result,
    get_map_bytes,
    get_pydig_result,
    merge_ipinfo_ipwhois,
    check_query_type_or_raise,
    check_ip_version_or_raise,
    check_asn_or_raise,
)

log = logging.getLogger("tuxbot.cogs.Network")
_ = Translator("Network", __file__)


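# Network diagnostics cog: iplocalise, cloudflare, getheaders, dig, ping,
# isdown and peeringdb commands, backed by an hourly task that caches
# PeeringDB's "net" dump.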
class Network(commands.Cog):
    _peeringdb_net: Optional[dict]

    def __init__(self, bot: Tux):
        self.bot = bot
        self.__config: NetworkConfig = ConfigFile(
            str(cogs_data_path("Network") / "config.yaml"),
            NetworkConfig,
        ).config

        self._peeringdb_net = None

        self._update_peering_db.start()  # pylint: disable=no-member

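    # Cog-specific errors (API quota exceeded, invalid IP/domain/query type/
    # ASN, ...) carry a translatable message, so it is sent straight back to
    # the invoking channel.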
    async def cog_command_error(self, ctx: ContextPlus, error):
        if isinstance(
            error,
            (
                RequestQuotaExceededError,
                RFC18,
                InvalidIp,
                InvalidDomain,
                InvalidQueryType,
                VersionNotFound,
                InvalidAsn,
            ),
        ):
            await ctx.send(_(str(error), ctx, self.bot.config))

    async def cog_before_invoke(self, ctx: ContextPlus):
        await ctx.trigger_typing()

    def cog_unload(self):
        self._update_peering_db.cancel()  # pylint: disable=no-member

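    # Refresh the cached PeeringDB "net" dump every hour. The request goes to
    # a hard-coded PeeringDB address with certificate verification disabled
    # (assumption: the host does not present a certificate matching the raw
    # IP); on timeout the previous snapshot is kept.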
    @tasks.loop(hours=1.0)
    async def _update_peering_db(self):
        try:
            async with aiohttp.ClientSession(
                connector=TCPConnector(verify_ssl=False)
            ) as cs:
                async with cs.get(
                    "https://3.233.208.117/api/net",
                    timeout=aiohttp.ClientTimeout(total=60),
                ) as s:
                    self._peeringdb_net = await s.json()
        except asyncio.exceptions.TimeoutError:
            pass
        else:
            log.log(logging.INFO, "_update_peering_db")

    # =========================================================================
    # =========================================================================

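    # iplocalise: resolve the target, then merge ipinfo.io and ipwhois data
    # into one embed (owner, RIR, region, flag and hostname), optionally with
    # a static map attached.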
@command_extra(name="iplocalise", aliases=["localiseip"], deletable=True)
|
|
|
|
async def _iplocalise(
|
|
|
|
self,
|
|
|
|
ctx: ContextPlus,
|
|
|
|
ip: IPConverter,
|
2021-04-24 22:34:53 +02:00
|
|
|
*,
|
|
|
|
params: Optional[IPParamsConverter] = None,
|
2021-01-25 17:28:59 +01:00
|
|
|
):
|
2021-04-24 22:34:53 +02:00
|
|
|
# noinspection PyUnresolvedReferences
|
|
|
|
check_ip_version_or_raise(params) # type: ignore
|
2021-03-02 19:00:08 +01:00
|
|
|
|
2021-04-24 22:34:53 +02:00
|
|
|
# noinspection PyUnresolvedReferences
|
|
|
|
ip_address = await get_ip(
|
|
|
|
self.bot.loop, str(ip), params # type: ignore
|
|
|
|
)
|
2021-04-22 18:11:55 +02:00
|
|
|
|
2021-04-22 00:16:37 +02:00
|
|
|
ip_hostname = await get_hostname(self.bot.loop, str(ip_address))
|
2021-01-25 17:28:59 +01:00
|
|
|
|
2021-01-26 10:21:39 +01:00
|
|
|
ipinfo_result = await get_ipinfo_result(
|
2021-05-16 17:07:26 +02:00
|
|
|
self.bot.loop, self.__config.ipinfoKey, ip_address
|
2021-01-26 10:21:39 +01:00
|
|
|
)
|
2021-04-22 00:16:37 +02:00
|
|
|
ipwhois_result = await get_ipwhois_result(self.bot.loop, ip_address)
|
2021-01-25 17:28:59 +01:00
|
|
|
|
2021-01-26 15:43:16 +01:00
|
|
|
merged_results = merge_ipinfo_ipwhois(ipinfo_result, ipwhois_result)
|
|
|
|
|
2021-01-25 17:28:59 +01:00
|
|
|
e = discord.Embed(
|
|
|
|
title=_(
|
|
|
|
"Information for ``{ip} ({ip_address})``", ctx, self.bot.config
|
|
|
|
).format(ip=ip, ip_address=ip_address),
|
|
|
|
color=0x5858D7,
|
|
|
|
)
|
|
|
|
|
2021-01-26 15:43:16 +01:00
|
|
|
e.add_field(
|
|
|
|
name=_("Belongs to:", ctx, self.bot.config),
|
|
|
|
value=merged_results["belongs"],
|
|
|
|
inline=True,
|
|
|
|
)
|
|
|
|
e.add_field(
|
|
|
|
name="RIR :",
|
|
|
|
value=merged_results["rir"],
|
|
|
|
inline=True,
|
|
|
|
)
|
|
|
|
e.add_field(
|
|
|
|
name=_("Region:", ctx, self.bot.config),
|
|
|
|
value=merged_results["region"],
|
|
|
|
inline=False,
|
|
|
|
)
|
|
|
|
|
|
|
|
e.set_thumbnail(url=merged_results["flag"])
|
2021-01-25 17:28:59 +01:00
|
|
|
|
|
|
|
e.set_footer(
|
|
|
|
text=_("Hostname: {hostname}", ctx, self.bot.config).format(
|
|
|
|
hostname=ip_hostname
|
|
|
|
),
|
|
|
|
)
|
|
|
|
|
2021-04-24 22:34:53 +02:00
|
|
|
kwargs: dict = {}
|
|
|
|
|
|
|
|
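        # If the caller requested a map, try to attach a rendered image.
        # Assumption: get_map_bytes fetches a static map through the Geoapify
        # API using the configured key and returns nothing on failure, in
        # which case no file is attached.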
        # noinspection PyUnresolvedReferences
        if (
            params is not None
            and params["map"]
            and (  # type: ignore
                map_bytes := await get_map_bytes(
                    self.__config.geoapifyKey, merged_results["map"]
                )
            )
        ):
            file = discord.File(map_bytes, "map.png")
            e.set_image(url="attachment://map.png")

            kwargs["file"] = file

        kwargs["embed"] = e

        return await ctx.send(f"https://ipinfo.io/{ip_address}#", **kwargs)

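    # cloudflare: try to find the origin address behind a Cloudflare-fronted
    # domain via Crimeflare, then re-run iplocalise against that address.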
    @command_extra(
        name="cloudflare", aliases=["cf", "crimeflare"], deletable=True
    )
    async def _cloudflare(
        self,
        ctx: ContextPlus,
        ip: DomainConverter,
    ):
        crimeflare_result = await get_crimeflare_result(str(ip))

        if crimeflare_result:
            alt_ctx = await copy_context_with(
                ctx, content=f"{ctx.prefix}iplocalise {crimeflare_result}"
            )
            return await alt_ctx.command.reinvoke(alt_ctx)

        await ctx.send(
            _(
                "Unable to collect information through CloudFlare",
                ctx,
                self.bot.config,
            )
        )

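    # getheaders: request the URL with an optional custom User-Agent and list
    # the response headers in an embed, coloured by status-code class, with
    # Set-Cookie stripped. Values over 50 characters are shortened and, when
    # shorten() provides a link, replaced by a "show all" link.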
@command_extra(name="getheaders", aliases=["headers"], deletable=True)
|
|
|
|
async def _getheaders(
|
2021-03-02 19:00:08 +01:00
|
|
|
self, ctx: ContextPlus, ip: DomainConverter, *, user_agent: str = ""
|
2021-01-26 15:24:10 +01:00
|
|
|
):
|
|
|
|
try:
|
|
|
|
headers = {"User-Agent": user_agent}
|
2021-01-26 17:11:30 +01:00
|
|
|
colors = {
|
|
|
|
"1": 0x17A2B8,
|
|
|
|
"2": 0x28A745,
|
|
|
|
"3": 0xFFC107,
|
|
|
|
"4": 0xDC3545,
|
|
|
|
"5": 0x343A40,
|
|
|
|
}
|
2021-01-26 15:24:10 +01:00
|
|
|
|
2021-04-23 00:52:23 +02:00
|
|
|
async with aiohttp.ClientSession() as cs:
|
|
|
|
async with cs.get(
|
|
|
|
str(ip),
|
|
|
|
headers=headers,
|
|
|
|
timeout=aiohttp.ClientTimeout(total=8),
|
|
|
|
) as s:
|
|
|
|
e = discord.Embed(
|
|
|
|
title=f"Headers : {ip}",
|
|
|
|
color=colors.get(str(s.status)[0], 0x6C757D),
|
|
|
|
)
|
|
|
|
e.add_field(
|
|
|
|
name="Status", value=f"```{s.status}```", inline=True
|
|
|
|
)
|
|
|
|
e.set_thumbnail(url=f"https://http.cat/{s.status}")
|
|
|
|
|
|
|
|
headers = dict(s.headers.items())
|
|
|
|
headers.pop("Set-Cookie", headers)
|
|
|
|
|
|
|
|
fail = False
|
|
|
|
|
|
|
|
for key, value in headers.items():
|
|
|
|
fail, output = await shorten(value, 50, fail)
|
|
|
|
|
|
|
|
if output["link"]:
|
|
|
|
value = _(
|
|
|
|
"[show all]({})", ctx, self.bot.config
|
|
|
|
).format(output["link"])
|
|
|
|
else:
|
|
|
|
value = f"```\n{output['text']}```"
|
|
|
|
|
|
|
|
e.add_field(name=key, value=value, inline=True)
|
|
|
|
|
|
|
|
await ctx.send(embed=e)
|
2021-04-22 14:54:46 +02:00
|
|
|
except (
|
|
|
|
ClientConnectorError,
|
|
|
|
InvalidURL,
|
|
|
|
asyncio.exceptions.TimeoutError,
|
|
|
|
):
|
2021-01-26 15:24:10 +01:00
|
|
|
await ctx.send(
|
|
|
|
_("Cannot connect to host {}", ctx, self.bot.config).format(ip)
|
|
|
|
)
|
2021-01-26 17:11:30 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
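    # dig: run a pydig lookup for the requested record type (optionally with
    # DNSSEC) and show each answer in its own embed field.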
@command_extra(name="dig", deletable=True)
|
|
|
|
async def _dig(
|
|
|
|
self,
|
|
|
|
ctx: ContextPlus,
|
|
|
|
domain: IPConverter,
|
|
|
|
query_type: QueryTypeConverter,
|
2021-05-16 23:21:27 +02:00
|
|
|
dnssec: Union[str, bool] = False,
|
2021-04-20 15:42:59 +02:00
|
|
|
):
|
|
|
|
check_query_type_or_raise(str(query_type))
|
2021-03-02 19:00:08 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
pydig_result = await get_pydig_result(
|
2021-04-22 18:11:55 +02:00
|
|
|
self.bot.loop, str(domain), str(query_type), dnssec
|
2021-04-20 15:42:59 +02:00
|
|
|
)
|
2021-01-26 17:11:30 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
e = discord.Embed(title=f"DIG {domain} {query_type}", color=0x5858D7)
|
2021-01-26 17:11:30 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
for i, value in enumerate(pydig_result):
|
|
|
|
e.add_field(name=f"#{i}", value=f"```{value}```")
|
2021-01-30 16:43:17 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
if not pydig_result:
|
|
|
|
e.add_field(
|
|
|
|
name=f"DIG {domain} IN {query_type}",
|
|
|
|
value=_("No result...", ctx, self.bot.config),
|
|
|
|
)
|
2021-01-30 16:43:17 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
await ctx.send(embed=e)
|
2021-01-30 16:43:17 +01:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
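    # ping: report the websocket latency and the time taken to trigger typing
    # as a rough round-trip measurement.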
@command_extra(name="ping", deletable=True)
|
|
|
|
async def _ping(self, ctx: ContextPlus):
|
|
|
|
start = time.perf_counter()
|
|
|
|
await ctx.trigger_typing()
|
|
|
|
end = time.perf_counter()
|
2021-03-31 21:57:37 +02:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
latency = round(self.bot.latency * 1000, 2)
|
|
|
|
typing = round((end - start) * 1000, 2)
|
2021-03-31 21:57:37 +02:00
|
|
|
|
2021-04-20 15:42:59 +02:00
|
|
|
e = discord.Embed(title="Ping", color=discord.Color.teal())
|
|
|
|
e.add_field(name="Websocket", value=f"{latency}ms")
|
|
|
|
e.add_field(name="Typing", value=f"{typing}ms")
|
|
|
|
await ctx.send(embed=e)
|
2021-04-22 15:31:01 +02:00
|
|
|
|
|
|
|
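    # isdown: ask isthissitedown.org whether the domain responds and relay its
    # verdict.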
@command_extra(name="isdown", aliases=["is_down", "down?"], deletable=True)
|
|
|
|
async def _isdown(self, ctx: ContextPlus, domain: IPConverter):
|
|
|
|
try:
|
2021-04-25 18:48:21 +02:00
|
|
|
url = f"https://www.isthissitedown.org/site/{domain}"
|
|
|
|
|
2021-04-23 00:52:23 +02:00
|
|
|
async with aiohttp.ClientSession() as cs:
|
|
|
|
async with cs.get(
|
2021-04-25 18:48:21 +02:00
|
|
|
url,
|
2021-04-23 00:52:23 +02:00
|
|
|
timeout=aiohttp.ClientTimeout(total=8),
|
|
|
|
) as s:
|
2021-04-25 18:48:21 +02:00
|
|
|
text = await s.text()
|
2021-04-23 00:52:23 +02:00
|
|
|
|
2021-04-25 18:48:21 +02:00
|
|
|
if "is up!" in text:
|
2021-04-23 00:52:23 +02:00
|
|
|
title = _("Up!", ctx, self.bot.config)
|
|
|
|
color = 0x28A745
|
2021-04-25 18:48:21 +02:00
|
|
|
else:
|
|
|
|
title = _("Down...", ctx, self.bot.config)
|
|
|
|
color = 0xDC3545
|
2021-04-23 00:52:23 +02:00
|
|
|
|
|
|
|
e = discord.Embed(title=title, color=color)
|
2021-04-22 15:31:01 +02:00
|
|
|
|
2021-04-25 18:48:21 +02:00
|
|
|
await ctx.send(url, embed=e)
|
2021-04-22 15:31:01 +02:00
|
|
|
|
|
|
|
except (
|
|
|
|
ClientConnectorError,
|
|
|
|
InvalidURL,
|
|
|
|
asyncio.exceptions.TimeoutError,
|
|
|
|
):
|
|
|
|
await ctx.send(
|
|
|
|
_("Cannot connect to host {}", ctx, self.bot.config).format(
|
|
|
|
domain
|
|
|
|
)
|
|
|
|
)
|
2021-04-22 18:11:55 +02:00
|
|
|
|
|
|
|
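    # peeringdb: look the ASN up in the cached PeeringDB snapshot and show its
    # type, traffic, ratio, prefix counts and related links.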
    @command_extra(
        name="peeringdb", aliases=["peer", "peering"], deletable=True
    )
    async def _peeringdb(self, ctx: ContextPlus, asn: ASConverter):
        check_asn_or_raise(str(asn))

        data = {}

        if self._peeringdb_net is None:
            return await ctx.send(
                _(
                    "Please retry in few minutes",
                    ctx,
                    self.bot.config,
                ).format(asn=asn)
            )

        for _data in self._peeringdb_net["data"]:
            if _data.get("asn", None) == int(str(asn)):
                data = _data
                break

        if not data:
            return await ctx.send(
                _(
                    "AS{asn} could not be found in PeeringDB's database.",
                    ctx,
                    self.bot.config,
                ).format(asn=asn)
            )

        filtered = {
            "info_type": "Type",
            "info_traffic": "Traffic",
            "info_ratio": "Ratio",
            "info_prefixes4": "Prefixes IPv4",
            "info_prefixes6": "Prefixes IPv6",
        }
        filtered_link = {
            "website": ("Site", "website"),
            "looking_glass": ("Looking Glass", "looking_glass"),
            "policy_general": ("Peering", "policy_url"),
        }

        e = discord.Embed(
            title=f"{data['name']} ({str_if_empty(data['aka'], f'AS{asn}')})",
            color=0x5858D7,
        )

        for key, name in filtered.items():
            e.add_field(
                name=name, value=f"```{str_if_empty(data.get(key), 'N/A')}```"
            )

        for key, names in filtered_link.items():
            if data.get(key):
                e.add_field(
                    name=names[0],
                    value=f"[{str_if_empty(data.get(key), 'N/A')}]"
                    f"({str_if_empty(data.get(names[1]), 'N/A')})",
                )

        if data["notes"]:
            output = (await shorten(data["notes"], 550))[1]
            e.description = output["text"]

        if data["created"]:
            e.timestamp = datetime.strptime(
                data["created"], "%Y-%m-%dT%H:%M:%SZ"
            )

        await ctx.send(f"https://www.peeringdb.com/net/{data['id']}", embed=e)