add proxies support

This commit is contained in:
norohind 2021-10-28 23:53:39 +03:00
parent 65a03b679f
commit bdcaec9b90
Signed by: norohind
GPG Key ID: 01C3BECC26FB59E1
2 changed files with 67 additions and 4 deletions

View File

@ -22,6 +22,7 @@ TODO:
3. Proper shutdown (done)
4. capi.demb.design special api
5. FID tracking system
6. Log level as argument
=========================DON'T RELY ON news_view=========================
@ -126,8 +127,6 @@ def discover():
failed.append(id_to_try)
tries = tries + 1
time.sleep(3)
def update(squad_id: int = None, amount_to_update: int = 1):
"""
@ -154,7 +153,6 @@ def update(squad_id: int = None, amount_to_update: int = 1):
id_to_update: int = single_squad_to_update[0]
logger.debug(f'Updating {id_to_update} ID')
utils.update_squad_info(id_to_update, db)
time.sleep(3)
if __name__ == '__main__':

View File

@ -1,6 +1,7 @@
import json
import os
import sqlite3
import time
from typing import Union
import requests
@ -15,6 +16,12 @@ BASE_URL = 'https://api.orerve.net/2.0/website/squadron/'
INFO_ENDPOINT = 'info'
NEWS_ENDPOINT = 'news/list'
# Proxy pool used by proxied_request().
# Each entry: 'url' — SOCKS5 proxy url (None means direct connection, no proxy);
# 'last_req' — unix timestamp of the last request made through that proxy
# (initialised to "now", so the cooldown also applies right after startup).
# Example of opening such a SOCKS tunnel over ssh:
# ssh -C2 -T -n -N -D 2081 patagonia
PROXIES_DICT: list[dict] = [{'url': 'socks5://127.0.0.1:2080', 'last_req': int(time.time())},
                            {'url': 'socks5://127.0.0.1:2081', 'last_req': int(time.time())},
                            {'url': None, 'last_req': int(time.time())}]
class FAPIDownForMaintenance(Exception):
    """Raised when the FAPI (Frontier API) is down for maintenance."""
    pass
@ -24,6 +31,58 @@ class FAPIUnknownStatusCode(Exception):
pass
def proxied_request(url: str, method: str = 'get', **kwargs) -> requests.Response:
    """Makes request through one of proxies in round robin manner, respects
    FDev's per-proxy request cooldown.

    Algorithm:
        1. pick the least recently used proxy;
        2. sleep whatever is left of that proxy's cooldown;
        3. perform the request through it;
        4. record the request time for that proxy.

    :param url: url to request
    :param method: method to use in request
    :param kwargs: passed through to requests.request
    :return: requests.Response object
    """
    TIME_BETWEEN_REQUESTS: int = 3  # seconds each proxy must rest between requests

    # The proxy with the smallest 'last_req' is the one whose cooldown
    # expires first (or has already expired).
    selected_proxy = min(PROXIES_DICT, key=lambda x: x['last_req'])
    logger.debug(f'Using {selected_proxy["url"]} proxy')

    # Sleep for the *remaining* part of the cooldown, not the elapsed part.
    # (The previous code slept `now - last_req`, which undersleeps: e.g. with
    # 1 s elapsed it slept 1 s for a 2 s total gap, violating the 3 s limit.)
    elapsed: int = int(time.time()) - selected_proxy['last_req']
    time_to_sleep: int = TIME_BETWEEN_REQUESTS - elapsed
    if time_to_sleep > 0:
        logger.debug(f'Sleeping {time_to_sleep} s')
        time.sleep(time_to_sleep)

    if selected_proxy['url'] is None:
        # None entry means a direct connection — let requests use no proxy.
        proxies: dict = None  # noqa
    else:
        proxies: dict = {'https': selected_proxy['url']}

    proxiedFapiRequest: requests.Response = requests.request(
        method=method,
        url=url,
        proxies=proxies,
        **kwargs
    )

    # selected_proxy is a reference into PROXIES_DICT, so updating it in place
    # is enough; searching the list again by url was redundant (and would hit
    # the wrong entry if two entries ever shared a url).
    selected_proxy['last_req'] = int(time.time())

    return proxiedFapiRequest
def authed_request(url: str, method: str = 'get', **kwargs) -> requests.Response:
"""Makes request to any url with valid bearer token
@ -31,12 +90,18 @@ def authed_request(url: str, method: str = 'get', **kwargs) -> requests.Response
:param method: method to make request, case insensitive, get by default
:param kwargs: will be passed to requests.request
:return: requests.Response object
"""
"""
Proxies support
We want to respect 3 sec limit for every proxy
"""
bearer: str = _get_bearer()
logger.debug(f'Requesting {method.upper()} {url!r}, kwargs: {kwargs}')
fapiRequest: requests.Response = requests.request(
fapiRequest: requests.Response = proxied_request(
method=method,
url=url,
headers={'Authorization': f'Bearer {bearer}'},