Mirror of https://github.com/norohind/jubilant-system-core.git, synced 2025-04-14 02:27:13 +03:00.
Done requests proxying
This commit is contained in:
parent
98918f88fd
commit
a3a87edea1
@ -27,7 +27,7 @@ class BearerManager:
|
||||
except Exception as e:
|
||||
logger.exception(f'Unable to parse capi.demb.design answer\nrequested: {bearer_request.url!r}\n'
|
||||
f'code: {bearer_request.status_code!r}\nresponse: {bearer_request.content!r}', exc_info=e)
|
||||
raise e
|
||||
raise BearerManagerException(e)
|
||||
|
||||
return bearer
|
||||
|
||||
@ -36,4 +36,9 @@ class BearerManager:
|
||||
return requests.get(url=endpoint, headers={'auth': self.demb_capi_auth})
|
||||
|
||||
|
||||
class BearerManagerException(Exception):
    """Raised when BearerManager fails to obtain or parse a bearer token.

    Wraps the original exception so callers can inspect the root cause
    via ``parent_exception``.
    """

    def __init__(self, parent_exception: Exception):
        # BUGFIX: the original skipped super().__init__, so str(exc) was
        # empty and log lines showed a blank message. Forward the root
        # cause's message to Exception.
        super().__init__(str(parent_exception))
        # Keep a direct reference to the root-cause exception for callers.
        self.parent_exception = parent_exception
|
||||
|
||||
|
||||
bearer_manager = BearerManager(os.environ['DEMB_CAPI_AUTH'], 'https://capi.demb.design')
|
||||
|
@ -1,3 +1,6 @@
|
||||
import os
|
||||
import time
|
||||
import json
|
||||
import requests
|
||||
from .BearerManager import bearer_manager
|
||||
from loguru import logger
|
||||
@ -13,18 +16,91 @@ BASE_URL = 'https://api.orerve.net/2.0/website/squadron/'
|
||||
INFO_ENDPOINT = 'info'
|
||||
NEWS_ENDPOINT = 'news/list'
|
||||
|
||||
# Proxy pool: each entry is {'url': <proxy url or None>, 'last_try': <unix ts>}.
# A 'url' of None means "direct connection, no proxy".
try:
    # Use a context manager so the file handle is closed deterministically
    # (the original bare open() leaked the descriptor).
    with open('proxies.json', 'r') as proxies_file:
        PROXIES_DICT: list[dict] = json.load(proxies_file)

except FileNotFoundError:
    # No proxies configured: fall back to a single direct connection.
    PROXIES_DICT: list[dict] = [{'url': None, 'last_try': 0}]


# Minimum delay between two requests through the same proxy, in seconds.
TIME_BETWEEN_REQUESTS: float = 3.0
if os.getenv("JUBILANT_TIME_BETWEEN_REQUESTS") is not None:
    try:
        TIME_BETWEEN_REQUESTS = float(os.getenv("JUBILANT_TIME_BETWEEN_REQUESTS"))

    except ValueError:  # env var isn't a valid float literal
        # BUGFIX: float() on a non-numeric string raises ValueError, not
        # TypeError — the original `except TypeError` could never fire, so a
        # malformed env var crashed the process at import time instead of
        # falling back to the 3.0 s default.
        pass
|
||||
def request(url: str, method: str = 'get', **kwargs) -> requests.Response:
    """Make a request through one of the proxies in round-robin manner,
    respecting the fdev request cooldown for every proxy.

    Algorithm:
        1. pick the proxy with the oldest ``last_try`` timestamp;
        2. if selected proxy is still cooling down, sleep the remainder of
           TIME_BETWEEN_REQUESTS for it;
        3. perform the request through it;
        4. if the request fails -> write ``last_try`` for the current proxy
           and retry with the next proxy.

    :param url: url to request
    :param method: HTTP method to use in the request
    :param kwargs: extra keyword arguments forwarded to requests.request
    :return: requests.Response object
    :raises Exceptions.FAPIDownForMaintenance: when FAPI answers 418
    """
    # NOTE: the original declared `global PROXIES_DICT`, but the list is only
    # mutated in place (never rebound), so the global statement was dead code.
    while True:
        # Oldest-used proxy first: min over last_try implements round-robin.
        selected_proxy = min(PROXIES_DICT, key=lambda x: x['last_try'])
        logger.debug(f'Requesting {method.upper()} {url!r}, kwargs: {kwargs}; Using {selected_proxy["url"]} proxy')

        # let's detect how much we have to wait
        time_to_sleep: float = (selected_proxy['last_try'] + TIME_BETWEEN_REQUESTS) - time.time()

        if 0 < time_to_sleep <= TIME_BETWEEN_REQUESTS:
            logger.debug(f'Sleeping {time_to_sleep} s')
            time.sleep(time_to_sleep)

        # A 'url' of None means direct connection: pass no proxies to requests.
        proxies: None | dict
        if selected_proxy['url'] is None:
            proxies = None

        else:
            proxies = {'https': selected_proxy['url']}

        try:
            proxied_fapi_request: requests.Response = requests.request(
                method=method,
                url=url,
                proxies=proxies,
                headers={'Authorization': f'Bearer {bearer_manager.get_random_bearer()}'},
                **kwargs
            )

            logger.debug(f'Request complete, code {proxied_fapi_request.status_code!r}, len '
                         f'{len(proxied_fapi_request.content)}')

        except requests.exceptions.ConnectionError as e:
            logger.error(f'Proxy {selected_proxy["url"]} is invalid: {str(e.__class__.__name__)}')
            selected_proxy['last_try'] = time.time()  # Anyway set last try to now
            continue

        selected_proxy['last_try'] = time.time()  # Set last try to now

        if proxied_fapi_request.status_code == 418:  # FAPI is on maintenance
            logger.warning(f'{method.upper()} {proxied_fapi_request.url} returned 418, content dump:\n{proxied_fapi_request.content!r}')
            raise Exceptions.FAPIDownForMaintenance

        if proxied_fapi_request.status_code == 504:
            # Rate limited
            selected_proxy['last_try'] = time.time()  # Anyway set last try to now
            logger.info(f'Rate limited to {url!r} via {selected_proxy["url"]}')
            continue

        elif proxied_fapi_request.status_code != 200:
            # Unexpected status: log it but still hand the response to the
            # caller for inspection.
            logger.warning(f"Request to {method.upper()} {url!r} with kwargs: {kwargs}, using {selected_proxy['url']} "
                           f"proxy ends with {proxied_fapi_request.status_code} status code, content: "
                           f"{proxied_fapi_request.content}")

        return proxied_fapi_request
|
||||
|
Loading…
x
Reference in New Issue
Block a user