import io
import socket
from typing import NoReturn, Optional, Union

import asyncio
import re

import ipinfo
import ipwhois
import pydig
import aiohttp

from ipinfo.exceptions import RequestQuotaExceededError
from ipwhois import Net
from ipwhois.asn import IPASN

from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer

from tuxbot.cogs.Network.functions.exceptions import (
    VersionNotFound,
    RFC18,
    InvalidIp,
    InvalidQueryType,
    InvalidAsn,
)


def _(x):
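    # Identity stand-in for the gettext-style "_" marker; presumably swapped
    # for a real translation function by the bot's i18n layer.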
    return x


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_ip(loop, ip: str, inet: Optional[dict]) -> str:
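    """Resolve ``ip`` in the requested address family.

    ``inet`` may carry ``{"inet": "4"}`` or ``{"inet": "6"}`` to force a
    family; the blocking ``socket.getaddrinfo`` call runs in an executor.
    """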
    _inet: Union[socket.AddressFamily, int] = 0  # pylint: disable=no-member

    if inet:
        if inet["inet"] == "6":
            _inet = socket.AF_INET6
        elif inet["inet"] == "4":
            _inet = socket.AF_INET

    def _get_ip(_ip: str):
        try:
            # take the resolved address out of one of the getaddrinfo entries
            return socket.getaddrinfo(_ip, None, _inet)[1][4][0]
        except socket.gaierror as e:
            raise VersionNotFound(
                _(
                    "Unable to collect information on this host in the "
                    "given IP version",
                )
            ) from e

    return await loop.run_in_executor(None, _get_ip, str(ip))


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_hostname(loop, ip: str) -> str:
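    """Reverse-resolve ``ip`` to a hostname, giving up after 200 ms."""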
    def _get_hostname(_ip: str):
        try:
            return socket.gethostbyaddr(_ip)[0]
        except socket.herror:
            return "N/A"

    try:
        return await asyncio.wait_for(
            loop.run_in_executor(None, _get_hostname, str(ip)),
            timeout=0.200,
        )
        # assuming that if the hostname isn't retrieved within the first
        # 200 ms, it doesn't exist
    except asyncio.exceptions.TimeoutError:
        return "N/A"


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_ipwhois_result(loop, ip: str) -> Union[NoReturn, dict]:
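    """Look up ASN data for ``ip`` via whois, returning ``{}`` on failure.

    Raises ``RFC18`` when the address belongs to a reserved/private range.
    """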
    def _get_ipwhois_result(_ip: str) -> Union[NoReturn, dict]:
        try:
            net = Net(_ip)
            obj = IPASN(net)
            return obj.lookup()
        except ipwhois.exceptions.ASNRegistryError:
            return {}
        except ipwhois.exceptions.IPDefinedError as e:
            raise RFC18(
                _(
                    "IP address {ip_address} is defined as a Private-Use"
                    " Network via RFC 1918."
                ).format(ip_address=_ip)
            ) from e

    try:
        return await asyncio.wait_for(
            loop.run_in_executor(None, _get_ipwhois_result, str(ip)),
            timeout=0.200,
        )
    except asyncio.exceptions.TimeoutError:
        return {}


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_ipinfo_result(loop, apikey: str, ip: str) -> dict:
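    """Fetch ipinfo.io details for ``ip``; returns ``{}`` on quota or timeout."""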
    def _get_ipinfo_result(_ip: str) -> Union[NoReturn, dict]:
        """
        Q. Why not getHandlerAsync?
        A. Using it leaves "Unclosed client session" and "Unclosed
           connector" warnings behind, so the synchronous handler runs in
           an executor instead.
        """
        try:
            handler = ipinfo.getHandler(apikey, request_options={"timeout": 7})
            return handler.getDetails(_ip).all
        except RequestQuotaExceededError:
            return {}

    try:
        return await asyncio.wait_for(
            loop.run_in_executor(None, _get_ipinfo_result, str(ip)),
            timeout=8,
        )
    except asyncio.exceptions.TimeoutError:
        return {}


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_crimeflare_result(ip: str) -> Optional[str]:
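    """Ask crimeflare for the origin IP behind a Cloudflare-fronted host, if any."""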
    try:
        async with aiohttp.ClientSession() as cs:
            async with cs.post(
                "http://www.crimeflare.org:82/cgi-bin/cfsearch.cgi",
                data=f"cfS={ip}",
                timeout=aiohttp.ClientTimeout(total=21),
            ) as s:
                result = re.search(r"(\d*\.\d*\.\d*\.\d*)", await s.text())

                if result:
                    return result.group()
    except (aiohttp.ClientError, asyncio.exceptions.TimeoutError):
        pass

    return None


def merge_ipinfo_ipwhois(ipinfo_result: dict, ipwhois_result: dict) -> dict:
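    """Merge ipinfo and ipwhois lookups into one display-ready dict.

    Every value defaults to "N/A"; a typical result looks roughly like
    this (illustrative values only):

        {
            "belongs": "[AS13335 Cloudflare, Inc.](https://bgp.he.net/AS13335)",
            "rir": "```arin```",
            "region": "```San Francisco - California (US)```",
            "flag": "https://flagcdn.com/144x108/us.png",
            "map": "37.7749,-122.4194",
        }
    """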
    output = {
        "belongs": "N/A",
        "rir": "N/A",
        "region": "N/A",
        "flag": "N/A",
        "map": "N/A",
    }

    if ipinfo_result:
        org = ipinfo_result.get("org", "N/A")
        asn = org.split()[0] if len(org.split()) > 1 else "N/A"

        output["belongs"] = f"[{org}](https://bgp.he.net/{asn})"
        output["rir"] = f"```{ipwhois_result.get('asn_registry', 'N/A')}```"
        output["region"] = (
            f"```{ipinfo_result.get('city', 'N/A')} - "
            f"{ipinfo_result.get('region', 'N/A')} "
            f"({ipinfo_result.get('country', 'N/A')})```"
        )
        output[
            "flag"
        ] = f"https://flagcdn.com/144x108/{ipinfo_result['country'].lower()}.png"
        output["map"] = ipinfo_result["loc"]

    elif ipwhois_result:
        org = ipwhois_result.get("asn_description", "N/A")
        asn = ipwhois_result.get("asn", "N/A")
        asn_country = ipwhois_result.get("asn_country_code", "N/A")

        output["belongs"] = f"{org} ([AS{asn}](https://bgp.he.net/{asn}))"
        output["rir"] = f"```{ipwhois_result['asn_registry']}```"
        output["region"] = f"```{asn_country}```"
        output[
            "flag"
        ] = f"https://flagcdn.com/144x108/{asn_country.lower()}.png"

    return output


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_map_bytes(apikey: str, latlon: str) -> Optional[io.BytesIO]:
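    """Render a small static map for a "lat,lon" string via Geoapify.

    Returns ``None`` when no coordinates are available, and a fallback
    image when the request times out.
    """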
    if latlon == "N/A":
        return None

    url = (
        "https://maps.geoapify.com/v1/staticmap"
        "?style=osm-carto"
        "&width=333"
        "&height=250"
        "&center=lonlat:{lonlat}"
        "&zoom=12"
        "&marker=lonlat:{lonlat};color:%23ff0000;size:small"
        "&apiKey={apikey}"
    )

    lonlat = ",".join(latlon.split(",")[::-1])

    url = url.format(lonlat=lonlat, apikey=apikey)

    try:
        async with aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=5)
        ) as cs:
            async with cs.get(url) as s:
                if s.status != 200:
                    return None

                return io.BytesIO(await s.read())
    except asyncio.exceptions.TimeoutError:
        from ..images.load_fail import value

        return io.BytesIO(value)


@cached(
    ttl=24 * 3600,
    serializer=PickleSerializer(),
    cache=Cache.MEMORY,
    namespace="network",
)
async def get_pydig_result(
    loop, domain: str, query_type: str, dnssec: Union[str, bool]
) -> list:
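    """Run a dig query for ``domain`` against public resolvers.

    Returns the answer records as a list, or ``[]`` if the resolver does
    not answer within 500 ms.
    """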
    additional_args = [] if dnssec is False else ["+dnssec"]

    def _get_pydig_result(_domain: str) -> list:
        resolver = pydig.Resolver(
            nameservers=[
                # public DNS resolvers
                "80.67.169.40",
                "80.67.169.12",
            ],
            additional_args=additional_args,
        )

        return resolver.query(_domain, query_type)

    try:
        return await asyncio.wait_for(
            loop.run_in_executor(None, _get_pydig_result, str(domain)),
            timeout=0.500,
        )
    except asyncio.exceptions.TimeoutError:
        return []


def check_ip_version_or_raise(
    version: Optional[dict],
) -> Union[bool, NoReturn]:
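    """Return True for a valid (or absent) IP version selector, else raise."""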
    if version is None or version["inet"] in ("4", "6", ""):
        return True

    raise InvalidIp(_("Invalid IP version"))


def check_query_type_or_raise(query_type: str) -> Union[bool, NoReturn]:
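    """Return True for a supported DNS record type, else raise."""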
    query_types = (
        "a",
        "aaaa",
        "cname",
        "ns",
        "ds",
        "dnskey",
        "soa",
        "txt",
        "ptr",
        "mx",
    )

    if query_type in query_types:
        return True

    raise InvalidQueryType(
        _(
            "Supported queries: A, AAAA, CNAME, NS, DS, DNSKEY, SOA, TXT, PTR, MX"
        )
    )


def check_asn_or_raise(asn: str) -> Union[bool, NoReturn]:
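    """Return True when ``asn`` is numeric and below 4,294,967,295, else raise."""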
    if asn.isdigit() and int(asn) < 4_294_967_295:
        return True

    raise InvalidAsn(_("Invalid ASN provided"))