1from urllib.parse import urlparse
2import aiohttp
3
4
# Crude, partial filter that inspects HTTPS URLs and checks whether they seem
# "safe" for server-side requests (SSRF). This is only a partial mitigation:
# the actual HTTP client must also prevent other attacks and behaviors.
# This is not a fully complete or secure implementation.
def is_safe_url(url: str) -> bool:
    """Return True if *url* looks safe for a server-side HTTPS request.

    Partial SSRF filter: accepts only plain ``https://host/...`` URLs —
    no userinfo, no explicit port, no bracketed/IPv6 netloc (enforced by
    the ``hostname == netloc`` comparison) — and rejects hostnames whose
    last label points at internal namespaces or looks like an IPv4
    literal. This is a mitigation layer only; the HTTP client itself
    must provide further protections (redirect handling, etc.).
    """
    parts = urlparse(url)
    if not (
        parts.scheme == "https"
        and parts.hostname is not None
        # hostname == netloc also excludes userinfo, ports, and [IPv6] forms
        and parts.hostname == parts.netloc
        and parts.username is None
        and parts.password is None
        and parts.port is None
    ):
        return False

    segments = parts.hostname.split(".")

    # Reject empty labels. Without this, a trailing-dot FQDN such as
    # "localhost." splits into ["localhost", ""] and the blocklist check
    # below only sees the empty last label — an SSRF bypass, since
    # "localhost." resolves the same as "localhost". Also catches
    # malformed hosts like "example..com".
    if any(not segment for segment in segments):
        return False

    if not (
        len(segments) >= 2
        and segments[-1] not in ["local", "arpa", "internal", "localhost"]
    ):
        return False

    # Last label all digits => IPv4-style literal (e.g. "127.0.0.1").
    if segments[-1].isdigit():
        return False

    return True
30
31
class HardenedHttp:
    """Factory for aiohttp client sessions preconfigured with conservative
    defaults (request timeouts and a fixed User-Agent)."""

    def get_session(self) -> aiohttp.ClientSession:
        """Create a new ``aiohttp.ClientSession``.

        The session enforces a 20 s total timeout with a 5 s connect
        timeout and identifies itself with a fixed User-Agent header.
        The caller owns the session and is responsible for closing it.
        """
        timeout = aiohttp.ClientTimeout(20, connect=5)
        default_headers = {"User-Agent": "ligo.at/0"}
        return aiohttp.ClientSession(timeout=timeout, headers=default_headers)
40
41
42hardened_http = HardenedHttp()