feat(commands:peeringdb|Network): add peeringdb command

Romain J 2021-04-23 00:47:58 +02:00
parent 2afd3af540
commit 2e7934148e
3 changed files with 122 additions and 72 deletions

View file

@@ -13,7 +13,7 @@ from ipinfo.exceptions import RequestQuotaExceededError
 from ipwhois import Net
 from ipwhois.asn import IPASN
-from aiocache import cached
+from aiocache import cached, Cache
 from aiocache.serializers import PickleSerializer
 from tuxbot.cogs.Network.functions.exceptions import (
@@ -29,7 +29,12 @@ def _(x):
     return x
-@cached(ttl=15 * 60, serializer=PickleSerializer())
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
 async def get_ip(loop, ip: str, inet: str = "") -> str:
     _inet: socket.AddressFamily | int = 0  # pylint: disable=no-member
@@ -52,7 +57,12 @@ async def get_ip(loop, ip: str, inet: str = "") -> str:
     return await loop.run_in_executor(None, _get_ip, str(ip))
-@cached(ttl=15 * 60, serializer=PickleSerializer())
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
 async def get_hostname(loop, ip: str) -> str:
     def _get_hostname(_ip: str):
         try:
@@ -71,11 +81,16 @@ async def get_hostname(loop, ip: str) -> str:
         return "N/A"
-@cached(ttl=15 * 60, serializer=PickleSerializer())
-async def get_ipwhois_result(loop, ip_address: str) -> NoReturn | dict:
-    def _get_ipwhois_result(_ip_address: str) -> NoReturn | dict:
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
+async def get_ipwhois_result(loop, ip: str) -> NoReturn | dict:
+    def _get_ipwhois_result(_ip: str) -> NoReturn | dict:
         try:
-            net = Net(ip_address)
+            net = Net(ip)
             obj = IPASN(net)
             return obj.lookup()
         except ipwhois.exceptions.ASNRegistryError:
@@ -90,38 +105,48 @@ async def get_ipwhois_result(loop, ip_address: str) -> NoReturn | dict:
     try:
         return await asyncio.wait_for(
-            loop.run_in_executor(None, _get_ipwhois_result, str(ip_address)),
+            loop.run_in_executor(None, _get_ipwhois_result, str(ip)),
             timeout=0.200,
         )
     except asyncio.exceptions.TimeoutError:
         return {}
-@cached(ttl=15 * 60, serializer=PickleSerializer())
-async def get_ipinfo_result(apikey: str, ip_address: str) -> dict:
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
+async def get_ipinfo_result(apikey: str, ip: str) -> dict:
     try:
         handler = ipinfo.getHandlerAsync(
             apikey, request_options={"timeout": 7}
         )
-        return (await handler.getDetails(ip_address)).all
+        return (await handler.getDetails(ip)).all
     except RequestQuotaExceededError:
         return {}
-@cached(ttl=15 * 60, serializer=PickleSerializer())
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
 async def get_crimeflare_result(
-    session: aiohttp.ClientSession, ip_address: str
+    session: aiohttp.ClientSession, ip: str
 ) -> Optional[str]:
     try:
         async with session.post(
             "http://www.crimeflare.org:82/cgi-bin/cfsearch.cgi",
-            data=f"cfS={ip_address}",
+            data=f"cfS={ip}",
             timeout=aiohttp.ClientTimeout(total=15),
         ) as s:
-            ip = re.search(r"(\d*\.\d*\.\d*\.\d*)", await s.text())
+            result = re.search(r"(\d*\.\d*\.\d*\.\d*)", await s.text())
-            if ip:
-                return ip.group()
+            if result:
+                return result.group()
     except (aiohttp.ClientError, asyncio.exceptions.TimeoutError):
         pass
@@ -161,7 +186,12 @@ def merge_ipinfo_ipwhois(ipinfo_result: dict, ipwhois_result: dict) -> dict:
     return output
-@cached(ttl=15 * 60, serializer=PickleSerializer())
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
 async def get_pydig_result(
     loop, domain: str, query_type: str, dnssec: str | bool
 ) -> list:
@@ -187,48 +217,25 @@ async def get_pydig_result(
     return []
-@cached(ttl=15 * 60, serializer=PickleSerializer())
-async def get_peeringdb_as_set_result(
+@cached(
+    ttl=24 * 3600,
+    serializer=PickleSerializer(),
+    cache=Cache.MEMORY,
+    namespace="network",
+)
+async def get_peeringdb_net_result(
     session: aiohttp.ClientSession, asn: str
-) -> Optional[dict]:
+) -> dict:
     try:
         async with session.get(
-            f"https://www.peeringdb.com/api/as_set/{asn}",
-            timeout=aiohttp.ClientTimeout(total=5),
+            f"https://peeringdb.com/api/net?asn={asn}",
+            timeout=aiohttp.ClientTimeout(total=8),
         ) as s:
             return await s.json()
-    except (
-        aiohttp.ClientError,
-        aiohttp.ContentTypeError,
-        asyncio.exceptions.TimeoutError,
-    ):
+    except (asyncio.exceptions.TimeoutError,):
         pass
-    return None
-@cached(ttl=15 * 60, serializer=PickleSerializer())
-async def get_peeringdb_net_irr_as_set_result(
-    session: aiohttp.ClientSession, asn: str
-) -> Optional[dict]:
-    try:
-        async with session.get(
-            f"https://www.peeringdb.com/api/net?irr_as_set={asn}",
-            timeout=aiohttp.ClientTimeout(total=10),
-        ) as s:
-            json = await s.json()
-            for data in json:
-                if data["asn"] == int(asn):
-                    return data
-    except (
-        aiohttp.ClientError,
-        aiohttp.ContentTypeError,
-        asyncio.exceptions.TimeoutError,
-    ):
-        pass
-    return None
+    return {"data": []}
 def check_ip_version_or_raise(version: str) -> bool | NoReturn:
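
All of the helpers above now share one aiocache configuration: a 24-hour TTL, pickled values, the in-process memory backend, and a common "network" namespace. A minimal standalone sketch of that caching pattern, assuming only that aiocache is installed; fetch_example and the AS number are hypothetical stand-ins for the real lookups:

import asyncio

from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer


@cached(
    ttl=24 * 3600,                  # results are reused for 24 hours
    serializer=PickleSerializer(),  # values are pickled before storage
    cache=Cache.MEMORY,             # in-process memory backend, no external service
    namespace="network",            # keys are grouped under the "network" namespace
)
async def fetch_example(asn: str) -> dict:
    await asyncio.sleep(1)          # stands in for a slow network lookup
    return {"asn": asn}


async def demo():
    await fetch_example("64500")    # first call executes the body
    await fetch_example("64500")    # second call is served from the memory cache


asyncio.run(demo())

Because the cache key is derived from the function arguments, a repeated lookup for the same value within the TTL never hits the network again.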

View file

@@ -1,6 +1,7 @@
 import asyncio
 import logging
 import time
+from datetime import datetime
 from typing import Optional
 import aiohttp
@@ -34,7 +35,7 @@ from tuxbot.core.utils.functions.extra import (
     ContextPlus,
     command_extra,
 )
-from tuxbot.core.utils.functions.utils import shorten
+from tuxbot.core.utils.functions.utils import shorten, str_if_empty
 from .config import NetworkConfig
 from .functions.utils import (
@@ -43,8 +44,7 @@ from .functions.utils import (
     get_ipinfo_result,
     get_ipwhois_result,
     get_pydig_result,
-    # get_peeringdb_as_set_result,
-    # get_peeringdb_net_irr_as_set_result,
+    get_peeringdb_net_result,
     merge_ipinfo_ipwhois,
     check_query_type_or_raise,
     check_ip_version_or_raise,
@@ -297,19 +297,58 @@ class Network(commands.Cog):
     async def _peeringdb(self, ctx: ContextPlus, asn: ASConverter):
         check_asn_or_raise(str(asn))
-        return await ctx.send("Not implemented yet")
+        data: dict = (
+            await get_peeringdb_net_result(self.bot.session, str(asn))
+        )["data"]
-        # peeringdb_as_set_result = await get_peeringdb_as_set_result(
-        #     self.bot.session, str(asn)
-        # )
-        # peeringdb_net_irr_as_set_result = (
-        #     await get_peeringdb_net_irr_as_set_result(
-        #         self.bot.session, peeringdb_as_set_result["data"][0][asn]
-        #     )
-        # )["data"]
-        #
-        # data = peeringdb_net_irr_as_set_result
-        #
-        # self.bot.console.log(data)
-        #
-        # await ctx.send("done")
+        if not data:
+            return await ctx.send(
+                _(
+                    "AS{asn} could not be found in PeeringDB's database.",
+                    ctx,
+                    self.bot.config,
+                ).format(asn=asn)
+            )
+        data = data[0]
+        filtered = {
+            "info_type": "Type",
+            "info_traffic": "Traffic",
+            "info_ratio": "Ratio",
+            "info_prefixes4": "Prefixes IPv4",
+            "info_prefixes6": "Prefixes IPv6",
+        }
+        filtered_link = {
+            "website": ("Site", "website"),
+            "looking_glass": ("Looking Glass", "looking_glass"),
+            "policy_general": ("Peering", "policy_url"),
+        }
+        e = discord.Embed(
+            title=f"{data['name']} ({str_if_empty(data['aka'], f'AS{asn}')})",
+            color=0x5858D7,
+        )
+        for key, name in filtered.items():
+            e.add_field(
+                name=name, value=f"```{str_if_empty(data.get(key), 'N/A')}```"
+            )
+        for key, names in filtered_link.items():
+            if data.get(key):
+                e.add_field(
+                    name=names[0],
+                    value=f"[{str_if_empty(data.get(key), 'N/A')}]"
+                    f"({str_if_empty(data.get(names[1]), 'N/A')})",
+                )
+        if data["notes"]:
+            output = (await shorten(self.bot.session, data["notes"], 550))[1]
+            e.description = output["text"]
+        if data["created"]:
+            e.timestamp = datetime.strptime(
+                data["created"], "%Y-%m-%dT%H:%M:%SZ"
+            )
+        await ctx.send(embed=e)
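
The command body above consumes get_peeringdb_net_result and reads the same fields it later renders into the embed (name, aka, info_*, website, notes, created). A rough usage sketch of the helper on its own, assuming it lives in tuxbot.cogs.Network.functions.utils (this commit view does not show file names) and using AS64500 as a placeholder ASN:

import asyncio

import aiohttp

# Assumed module path for the helper added in the first file of this commit.
from tuxbot.cogs.Network.functions.utils import get_peeringdb_net_result


async def demo():
    async with aiohttp.ClientSession() as session:
        payload = await get_peeringdb_net_result(session, "64500")
        nets = payload["data"]  # empty list when PeeringDB has no match or the request timed out
        if not nets:
            print("AS64500 could not be found in PeeringDB's database.")
            return
        net = nets[0]
        # Same fields the embed uses above.
        print(net["name"], net.get("info_type"), net.get("website"))


asyncio.run(demo())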

View file

@@ -1,6 +1,6 @@
 import asyncio
 import functools
-from typing import Dict
+from typing import Dict, Optional
 import aiohttp
 from discord.ext import commands
@@ -81,3 +81,7 @@ def replace_in_list(value: list, search: str, replace: str) -> list:
         clean.append(v)
     return clean
+def str_if_empty(value: Optional[str], replacement: str) -> str:
+    return value if value else replacement
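
str_if_empty simply substitutes a fallback for falsy values, which is what lets the embed code above fall back to "N/A" or the AS number when PeeringDB returns empty strings. A few illustrative calls with hypothetical values:

str_if_empty("Example Networks", "AS64500")  # -> "Example Networks"
str_if_empty("", "AS64500")                  # -> "AS64500" (empty string is falsy)
str_if_empty(None, "N/A")                    # -> "N/A"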