exponential backoff to avoid ratelimiting

master
Nicholas Hope 2023-01-04 22:05:29 -05:00
parent f1caba62f7
commit 1d886763f7
1 changed file with 27 additions and 11 deletions
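
For context on the commit title: the retry loop added below sleeps 2 ** attempts seconds per attempt, doubling each time for up to five attempts. A quick sketch of the schedule that produces:

# Backoff schedule implied by sleep(2 ** attempts) with attempts 1..5.
delays = [2 ** attempt for attempt in range(1, 6)]
print(delays)       # [2, 4, 8, 16, 32]
print(sum(delays))  # 62 -> at most about a minute of sleeping before giving up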

@@ -6,6 +6,7 @@ from os import chdir
 from datetime import datetime, timezone
 from dateutil import tz
 import pickle
+from time import sleep
 
 class IPCache:
     picklefile_name = 'cached_ips.pkl'
@@ -17,16 +18,28 @@ class IPCache:
     def get(self, ip, /):
         if ip in self.cache:
             return self.cache[ip]
+        print(f'{ip} not in cache')
         addr = f'http://ip-api.com/json/{ip}'
-        response = requests.get(addr)
-        if not response.ok:
-            raise RuntimeError(f'request for ip failed with {response.status_code}')
+        attempts = 1
+        max_attempts = 5
+        while attempts <= max_attempts:
+            response = requests.get(addr)
+            sleep(2 ** attempts)
+            if not response.ok:
+                print(f'request for {ip} failed with {response.status_code}')
+                attempts += 1
+                continue
+            break
+        else:
+            raise RuntimeError(f'critical failure (> 5 retries)')
         resulting_dict = eval(response.content)
         if resulting_dict['status'] == 'fail':
             raise RuntimeError(f'ip was invalid')
         # the given timezone is like, 'Australia/Sydney'. we need to convert to
         # a datetime.timezone type
         timezone_str = resulting_dict['timezone']
+        if timezone_str == 'Europe/Kyiv':
+            timezone_str = 'Europe/Kiev'
         tzfile = tz.gettz(timezone_str)
         as_timedelta = tzfile.utcoffset(datetime.utcnow())
         as_timezone_type = timezone(as_timedelta)
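
Note on the hunk above: as committed, the loop sleeps even after a successful response (which also spaces out consecutive API calls), and the body is parsed with eval(). A minimal standalone sketch of the same retry-with-backoff idea, under two assumptions of my own (sleep only after a failed attempt, and parse with response.json()), is below. The helper name fetch_json_with_backoff and the example IP are hypothetical, not part of this commit.

# Hedged sketch, not the committed code: GET with exponential backoff,
# sleeping only between failed attempts and parsing JSON without eval().
import requests
from time import sleep

def fetch_json_with_backoff(url, max_attempts=5):
    for attempt in range(1, max_attempts + 1):
        response = requests.get(url)
        if response.ok:
            return response.json()
        print(f'attempt {attempt} failed with {response.status_code}')
        if attempt < max_attempts:
            sleep(2 ** attempt)  # 2, 4, 8, 16 seconds between retries
    raise RuntimeError(f'request to {url} failed after {max_attempts} attempts')

# Hypothetical usage against the endpoint the cache queries:
# fetch_json_with_backoff('http://ip-api.com/json/8.8.8.8')
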
@@ -135,15 +148,17 @@ def main(args: list) -> int:
     outfile = 'analysis.csv'
     start_dir = Path('.').resolve()
 
+    logdir = args[1]
+    with Path(logdir) as p:
+        logdir = p.resolve()
     with open(outfile, 'w') as f:
-        for logdir in args[1:]:
-            chdir(logdir)
-            serverdir = Path('.')
-            for subdir in serverdir.iterdir():
-                csv_lines = analyze_server(subdir)
-                f.write(csv_lines)
-            chdir(start_dir)
+        chdir(logdir)
+        serverdir = Path('.')
+        for subdir in serverdir.iterdir():
+            csv_lines = analyze_server(subdir)
+            f.write(csv_lines)
+        chdir(start_dir)
 
     return 0
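
Note on the hunk above: main() now analyzes the single directory given in args[1] instead of looping over everything in args[1:]. The with Path(logdir) as p: block only calls .resolve(), so the same thing can be written without the context manager (Path's context-manager support is deprecated in newer Python releases); a rough sketch, with the script name analyze_logs.py being hypothetical:

# Hedged sketch: resolve the single log-directory argument directly.
import sys
from pathlib import Path

logdir = Path(sys.argv[1]).resolve()  # e.g. python analyze_logs.py /path/to/logs
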
@@ -151,4 +166,5 @@ log_date_format = r'%y/%m/%d %H:%M:%S %z'
 
 if __name__ == '__main__':
     with IPCache() as ip_cache:
-        sys.exit(main(sys.argv))
+        code = main(sys.argv)
+        sys.exit(code)
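
The IPCache body is largely outside this diff, so purely for orientation: a pickle-backed cache matching what is visible here (the picklefile_name attribute, the pickle import, and the with IPCache() as ip_cache usage) could look like the sketch below. Every method body is an assumption of mine, not the committed code.

# Hedged sketch of the context-manager pattern the script relies on;
# this is NOT the committed IPCache, only an illustration.
import pickle
from pathlib import Path

class PickleCacheSketch:
    picklefile_name = 'cached_ips.pkl'

    def __enter__(self):
        # reload results from a previous run, if any
        path = Path(self.picklefile_name)
        self.cache = pickle.loads(path.read_bytes()) if path.exists() else {}
        return self

    def __exit__(self, exc_type, exc, tb):
        # persist everything looked up during this run
        Path(self.picklefile_name).write_bytes(pickle.dumps(self.cache))
        return False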