DuplicateMessages: Full flake8 and mypy pass

Athanasius 2021-11-04 16:28:45 +00:00
parent 2d896ce37f
commit f81fdf91ee


@@ -6,6 +6,7 @@ import re
 from datetime import datetime, timedelta
 from threading import Lock, Thread
 from time import sleep
+from typing import Dict
 import simplejson
@@ -17,15 +18,15 @@ class DuplicateMessages(Thread):
     max_minutes = Settings.RELAY_DUPLICATE_MAX_MINUTES
-    caches = {}
+    caches: Dict = {}
     lock = Lock()
-    def __init__(self):
+    def __init__(self) -> None:
         super(DuplicateMessages, self).__init__()
         self.daemon = True
-    def run(self):
+    def run(self) -> None:
         """Expire duplicate messages."""
         while True:
             sleep(60)
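Aside: the bare Dict annotation above is enough to satisfy this mypy pass. Since the cache maps SHA-256 hex digests to first-seen times (see the next hunk), a parameterised annotation would also type-check and would be stricter. A minimal sketch of that alternative, which is a suggestion and not part of this commit:

    from datetime import datetime
    from typing import Dict

    # Hypothetical tighter annotation than the bare `Dict` used in the commit:
    # keys are SHA-256 hex digests (str), values are first-seen UTC timestamps.
    caches: Dict[str, datetime] = {}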
@@ -36,35 +37,37 @@ class DuplicateMessages(Thread):
             if self.caches[key] + timedelta(minutes=self.max_minutes) < max_time:
                 del self.caches[key]
-    def is_duplicated(self, json):
+    def is_duplicated(self, json: Dict) -> bool:
         """Detect if the given message is in the duplicates cache."""
         with self.lock:
             # Test messages are never duplicate, would be a pain to wait for another test :D
             if re.search('test', json['$schemaRef'], re.I):
                 return False
             # Shallow copy, minus headers
-            jsonTest = {
+            json_test = {
                 '$schemaRef': json['$schemaRef'],
                 'message': dict(json['message']),
             }
             # Remove timestamp (Mainly to avoid multiple scan messages and faction influences)
-            jsonTest['message'].pop('timestamp')
+            json_test['message'].pop('timestamp')
             # Convert journal starPos to avoid software modification in dupe messages
-            if 'StarPos' in jsonTest['message']:
-                jsonTest['message']['StarPos'] = [int(round(x * 32)) for x in jsonTest['message']['StarPos']]
+            if 'StarPos' in json_test['message']:
+                json_test['message']['StarPos'] = [int(round(x * 32)) for x in json_test['message']['StarPos']]
             # Prevent journal Docked event with small difference in distance from start
-            if 'DistFromStarLS' in jsonTest['message']:
-                jsonTest['message']['DistFromStarLS'] = int(jsonTest['message']['DistFromStarLS'] + 0.5)
+            if 'DistFromStarLS' in json_test['message']:
+                json_test['message']['DistFromStarLS'] = int(json_test['message']['DistFromStarLS'] + 0.5)
             # Remove journal ScanType and DistanceFromArrivalLS (Avoid duplicate scan messages after SAAScanComplete)
-            jsonTest['message'].pop('ScanType', None)
-            jsonTest['message'].pop('DistanceFromArrivalLS', None)
+            json_test['message'].pop('ScanType', None)
+            json_test['message'].pop('DistanceFromArrivalLS', None)
-            message = simplejson.dumps(jsonTest, sort_keys=True)  # Ensure most duplicate messages will get the same key
-            key = hashlib.sha256(message).hexdigest()
+            message = simplejson.dumps(json_test, sort_keys=True)  # Ensure most duplicate messages will get the same
+            # key
+            key = hashlib.sha256(message.encode('utf8')).hexdigest()
             if key not in self.caches:
                 self.caches[key] = datetime.utcnow()
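For reference, a self-contained sketch of the key computation after this change. Under Python 3, hashlib.sha256() only accepts bytes, so the old hashlib.sha256(message) call would raise a TypeError; the added .encode('utf8') is therefore a behavioural fix as well as part of the lint pass. The function name, the default values on the pop() calls, and the sample message and schema URL below are illustrative assumptions, not code from this commit:

    import hashlib
    from typing import Dict

    import simplejson

    def dedupe_key(json: Dict) -> str:
        """Sketch of the normalisation and hashing done in is_duplicated()."""
        # Shallow copy, minus headers, then strip or round fields that legitimately vary
        json_test = {
            '$schemaRef': json['$schemaRef'],
            'message': dict(json['message']),
        }
        json_test['message'].pop('timestamp', None)
        if 'StarPos' in json_test['message']:
            json_test['message']['StarPos'] = [int(round(x * 32)) for x in json_test['message']['StarPos']]
        if 'DistFromStarLS' in json_test['message']:
            json_test['message']['DistFromStarLS'] = int(json_test['message']['DistFromStarLS'] + 0.5)
        json_test['message'].pop('ScanType', None)
        json_test['message'].pop('DistanceFromArrivalLS', None)
        # sort_keys=True yields a canonical JSON string; sha256() needs bytes, hence .encode('utf8')
        message = simplejson.dumps(json_test, sort_keys=True)
        return hashlib.sha256(message.encode('utf8')).hexdigest()

    # Illustrative usage (schema URL and values are assumptions):
    print(dedupe_key({
        '$schemaRef': 'https://eddn.edcd.io/schemas/journal/1',
        'message': {'timestamp': '2021-11-04T16:28:45Z', 'event': 'FSDJump', 'StarPos': [1.03125, -2.0, 3.5]},
    }))

Two messages that differ only in these stripped or rounded fields produce the same digest and so hit the cache as duplicates.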