Moved _Core/_Conf to core/conf and some UTF8 fixes

AnthorNet 2015-06-05 16:08:32 +02:00
parent 131b36ac3a
commit 2f72fe84fe
15 changed files with 92 additions and 79 deletions
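The "UTF8 fixes" in this commit swap str() casts for .encode('utf8') wherever unicode values pulled out of decoded JSON end up as dictionary keys. A minimal sketch of the Python 2 behaviour that motivates the change; the payload and key are made up for illustration:

```python
# -*- coding: utf-8 -*-
# Under Python 2, simplejson.loads() returns unicode strings. Forcing one that
# contains non-ASCII characters through str() implicitly encodes it as ASCII
# and raises UnicodeEncodeError; .encode('utf8') converts it explicitly.
import simplejson

payload = simplejson.loads('{"softwareName": "ExampleApp \u00e9"}')
name = payload['softwareName']      # u'ExampleApp é'

try:
    key = str(name)                 # Python 2: UnicodeEncodeError
except UnicodeEncodeError:
    key = name.encode('utf8')       # byte string 'ExampleApp \xc3\xa9'

print(simplejson.dumps({key: 1}))   # works with either key type
```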

.gitignore vendored

@@ -52,5 +52,5 @@ docs/_build/
# PyBuilder
target/
*.prices
*.prices


@@ -3,7 +3,7 @@ import re
import glob
-VERSIONFILE = "src/eddn/_Conf/Version.py"
+VERSIONFILE = "src/eddn/conf/Version.py"
verstr = "unknown"
try:
verstrline = open(VERSIONFILE, "rt").read()
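The hunk above stops at the start of the try block. For context, this is the usual single-source-of-truth version pattern: setup.py reads conf/Version.py and pulls __version__ out with a regular expression. A hedged sketch of how such a block is typically completed; the regex and fallback are illustrative, not copied from the repository:

```python
import re

VERSIONFILE = "src/eddn/conf/Version.py"
verstr = "unknown"
try:
    verstrline = open(VERSIONFILE, "rt").read()
    # Look for a line of the form: __version__ = "0.4"
    match = re.search(r'^__version__\s*=\s*"([^"]*)"', verstrline, re.M)
    if match:
        verstr = match.group(1)
except EnvironmentError:
    # File missing or unreadable: keep the "unknown" default set above.
    pass
```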


@@ -1,3 +1,5 @@
+# coding: utf8
"""
Contains the necessary ZeroMQ socket and a helper function to publish
market data to the Announcer daemons.
@@ -13,8 +15,8 @@ from datetime import datetime
import os
-from eddn._Conf.Settings import Settings, loadConfig
-from eddn._Core.Validator import Validator, ValidationSeverity
+from eddn.conf.Settings import Settings, loadConfig
+from eddn.core.Validator import Validator, ValidationSeverity
from gevent import monkey
monkey.patch_all()
@@ -29,7 +31,7 @@ sender = context.socket(zmq.PUB)
validator = Validator()
# This import must be done post-monkey-patching!
-from eddn._Core.StatsCollector import StatsCollector
+from eddn.core.StatsCollector import StatsCollector
statsCollector = StatsCollector()
statsCollector.start()
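The "post-monkey-patching" comment is the key ordering constraint in this file: gevent's monkey.patch_all() replaces blocking stdlib primitives (sockets, threading, sleep) with cooperative ones, so anything that captures those primitives must only be imported afterwards. A small self-contained illustration, with a toy Thread subclass standing in for StatsCollector:

```python
from gevent import monkey
monkey.patch_all()  # must run before anything that binds threading/socket/sleep

# Anything that subclasses Thread (as StatsCollector does) should only be
# imported or defined after patch_all(), so it picks up the gevent-patched
# Thread and sleep rather than the original blocking implementations.
import threading
import time


class Heartbeat(threading.Thread):
    """Toy stand-in for StatsCollector: wakes up a few times, then stops."""
    def run(self):
        for _ in range(3):
            time.sleep(0.1)  # cooperative sleep once patched
            print("tick")


worker = Heartbeat()
worker.start()
worker.join()  # the main greenlet yields here until the worker finishes
```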


@@ -1,3 +1,5 @@
+# coding: utf8
"""
Monitors sit below gateways, or another relay, and simply parse what they receive over SUB.
"""
@@ -10,13 +12,13 @@ import datetime
import collections
import zmq.green as zmq
from bottle import get, request, response, run as bottle_run
-from eddn._Conf.Settings import Settings, loadConfig
+from eddn.conf.Settings import Settings, loadConfig
from gevent import monkey
monkey.patch_all()
if Settings.RELAY_DUPLICATE_MAX_MINUTES:
-from eddn._Core.DuplicateMessages import DuplicateMessages
+from eddn.core.DuplicateMessages import DuplicateMessages
duplicateMessages = DuplicateMessages()
duplicateMessages.start()
@@ -30,138 +32,138 @@ def getTotalSoftwares():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
softwares = collections.OrderedDict()
maxDays = request.GET.get('maxDays', '31').strip()
maxDays = int(maxDays) -1;
query = """SELECT name, SUM(hits) AS total, MAX(dateStats) AS maxDate
FROM softwares
GROUP BY name
HAVING maxDate >= DATE('now', '""" + '-' + str(maxDays) + """ day')
ORDER BY total DESC"""
results = db.execute(query)
for row in results:
-softwares[str(row[0])] = str(row[1])
+softwares[row[0].encode('utf8')] = str(row[1])
db.close()
return simplejson.dumps(softwares)
@get('/getSoftwares/')
def getSoftwares():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
softwares = collections.OrderedDict()
dateStart = request.GET.get('dateStart', str(date('%Y-%m-%d'))).strip()
dateEnd = request.GET.get('dateEnd', str(date('%Y-%m-%d'))).strip()
query = """SELECT *
FROM softwares
WHERE dateStats BETWEEN ? AND ?
ORDER BY hits DESC, dateStats ASC"""
results = db.execute(query, (dateStart, dateEnd))
for row in results:
-if not str(row[2]) in softwares.keys():
-softwares[str(row[2])] = collections.OrderedDict()
-softwares[str(row[2])][str(row[0])] = str(row[1])
+if not str(row[2].encode('utf8')) in softwares.keys():
+softwares[row[2].encode('utf8')] = collections.OrderedDict()
+softwares[row[2].encode('utf8')][str(row[0])] = str(row[1])
db.close()
return simplejson.dumps(softwares)
@get('/getTotalUploaders/')
def getTotalUploaders():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
uploaders = collections.OrderedDict()
limit = request.GET.get('limit', '20').strip()
query = """SELECT name, SUM(hits) AS total
FROM uploaders
GROUP BY name
ORDER BY total DESC
LIMIT """ + limit
results = db.execute(query)
for row in results:
-uploaders[str(row[0])] = str(row[1])
+uploaders[row[0].encode('utf8')] = row[1]
db.close()
return simplejson.dumps(uploaders)
@get('/getUploaders/')
def getUploaders():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
uploaders = collections.OrderedDict()
dateStart = request.GET.get('dateStart', str(date('%Y-%m-%d'))).strip()
dateEnd = request.GET.get('dateEnd', str(date('%Y-%m-%d'))).strip()
query = """SELECT *
FROM uploaders
WHERE dateStats BETWEEN ? AND ?
ORDER BY hits DESC, dateStats ASC"""
results = db.execute(query, (dateStart, dateEnd))
for row in results:
-if not str(row[2]) in uploaders.keys():
-uploaders[str(row[2])] = collections.OrderedDict()
-uploaders[str(row[2])][str(row[0])] = str(row[1])
+if not row[2].encode('utf8') in uploaders.keys():
+uploaders[row[2].encode('utf8')] = collections.OrderedDict()
+uploaders[row[2]][row[0].encode('utf8')] = row[1]
db.close()
return simplejson.dumps(uploaders)
@get('/getTotalSchemas/')
def getTotalSchemas():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
schemas = collections.OrderedDict()
query = """SELECT name, SUM(hits) AS total
FROM schemas
GROUP BY name
ORDER BY total DESC"""
results = db.execute(query)
for row in results:
schemas[str(row[0])] = str(row[1])
db.close()
return simplejson.dumps(schemas)
@get('/getSchemas/')
def getSchemas():
response.set_header("Access-Control-Allow-Origin", "*")
db = sqlite3.connect(Settings.MONITOR_DB)
schemas = collections.OrderedDict()
dateStart = request.GET.get('dateStart', str(date('%Y-%m-%d'))).strip()
dateEnd = request.GET.get('dateEnd', str(date('%Y-%m-%d'))).strip()
query = """SELECT *
FROM schemas
WHERE dateStats BETWEEN ? AND ?
ORDER BY hits DESC, dateStats ASC"""
results = db.execute(query, (dateStart, dateEnd))
for row in results:
if not str(row[2]) in schemas.keys():
schemas[str(row[2])] = collections.OrderedDict()
schemas[str(row[2])][str(row[0])] = str(row[1])
db.close()
return simplejson.dumps(schemas)
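All of the date-ranged endpoints above share one shape: an OrderedDict keyed from the result rows, filled by a parameterized query whose bounds travel as ? bindings instead of being spliced into the SQL text. A self-contained sketch of that pattern against an in-memory database, assuming a softwares(name, hits, dateStats) layout consistent with the row[0]/row[1]/row[2] indexing used above:

```python
import collections
import sqlite3

import simplejson

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE softwares (name TEXT, hits INTEGER, dateStats TEXT)')
db.executemany('INSERT INTO softwares VALUES (?, ?, ?)', [
    ('ToolA', 12, '2015-06-01'),
    ('ToolB', 7, '2015-06-02'),
])

# Caller-supplied bounds are bound parameters, never concatenated SQL.
dateStart, dateEnd = '2015-06-01', '2015-06-30'
rows = db.execute(
    'SELECT name, hits, dateStats FROM softwares '
    'WHERE dateStats BETWEEN ? AND ? '
    'ORDER BY hits DESC, dateStats ASC',
    (dateStart, dateEnd),
)

softwares = collections.OrderedDict()
for name, hits, dateStats in rows:
    softwares.setdefault(dateStats, collections.OrderedDict())[name] = str(hits)

print(simplejson.dumps(softwares))
db.close()
```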
@@ -170,66 +172,69 @@ class Monitor(Thread):
def run(self):
context = zmq.Context()
receiver = context.socket(zmq.SUB)
receiver.setsockopt(zmq.SUBSCRIBE, '')
for binding in Settings.MONITOR_RECEIVER_BINDINGS:
receiver.connect(binding)
def monitor_worker(message):
db = sqlite3.connect(Settings.MONITOR_DB)
# Separate topic from message
message = message.split(' |-| ')
# Handle gateway not sending topic
if len(message) > 1:
message = message[1]
else:
message = message[0]
if Settings.RELAY_DUPLICATE_MAX_MINUTES:
if duplicateMessages.isDuplicated(message):
schemaID = 'DUPLICATE MESSAGE'
c = db.cursor()
c.execute('UPDATE schemas SET hits = hits + 1 WHERE `name` = ? AND `dateStats` = DATE("now", "utc")', (schemaID, ))
c.execute('INSERT OR IGNORE INTO schemas (name, dateStats) VALUES (?, DATE("now", "utc"))', (schemaID, ))
db.commit()
return
if Settings.MONITOR_DECOMPRESS_MESSAGES:
message = zlib.decompress(message)
json = simplejson.loads(message)
# Update software count
-softwareID = json['header']['softwareName'] + ' | ' + json['header']['softwareVersion']
+softwareID = json['header']['softwareName'].encode('utf8') + ' | ' + json['header']['softwareVersion'].encode('utf8')
c = db.cursor()
c.execute('UPDATE softwares SET hits = hits + 1 WHERE `name` = ? AND `dateStats` = DATE("now", "utc")', (softwareID, ))
c.execute('INSERT OR IGNORE INTO softwares (name, dateStats) VALUES (?, DATE("now", "utc"))', (softwareID, ))
db.commit()
# Update uploader count
-uploaderID = json['header']['uploaderID']
+uploaderID = json['header']['uploaderID'].encode('utf8')
if uploaderID: # Don't get empty uploaderID
c = db.cursor()
c.execute('UPDATE uploaders SET hits = hits + 1 WHERE `name` = ? AND `dateStats` = DATE("now", "utc")', (uploaderID, ))
c.execute('INSERT OR IGNORE INTO uploaders (name, dateStats) VALUES (?, DATE("now", "utc"))', (uploaderID, ))
db.commit()
# Update schemas count
schemaID = json['$schemaRef']
c = db.cursor()
c.execute('UPDATE schemas SET hits = hits + 1 WHERE `name` = ? AND `dateStats` = DATE("now", "utc")', (schemaID, ))
c.execute('INSERT OR IGNORE INTO schemas (name, dateStats) VALUES (?, DATE("now", "utc"))', (schemaID, ))
db.commit()
db.close()
while True:
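The run() method is cut off at its receive loop. A hedged sketch of the usual zmq.green SUB pattern it sets up, with a trivial handler standing in for monitor_worker; the endpoint and handler are illustrative only:

```python
import gevent
import zmq.green as zmq

context = zmq.Context()
receiver = context.socket(zmq.SUB)
receiver.setsockopt(zmq.SUBSCRIBE, b'')   # subscribe to every topic
receiver.connect('tcp://127.0.0.1:9500')  # illustrative endpoint


def handle(message):
    # Stand-in for monitor_worker(): parse and record the message here.
    print('received %d bytes' % len(message))


while True:
    # recv() only blocks this greenlet; each message gets its own worker
    # greenlet so a slow handler does not stall the subscription.
    inboundMessage = receiver.recv()
    gevent.spawn(handle, inboundMessage)
```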


@@ -1,3 +1,5 @@
+# coding: utf8
"""
Relays sit below an announcer, or another relay, and simply repeat what
they receive over PUB/SUB.
@@ -13,18 +15,18 @@ import gevent
import simplejson
import zmq.green as zmq
from bottle import get, response, run as bottle_run
-from eddn._Conf.Settings import Settings, loadConfig
+from eddn.conf.Settings import Settings, loadConfig
from gevent import monkey
monkey.patch_all()
-from eddn._Core.StatsCollector import StatsCollector
+from eddn.core.StatsCollector import StatsCollector
statsCollector = StatsCollector()
statsCollector.start()
if Settings.RELAY_DUPLICATE_MAX_MINUTES:
-from eddn._Core.DuplicateMessages import DuplicateMessages
+from eddn.core.DuplicateMessages import DuplicateMessages
duplicateMessages = DuplicateMessages()
duplicateMessages.start()
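The relay imports shown here only cover setup; the forwarding itself is the classic SUB-in / PUB-out bridge. A minimal, hypothetical sketch of that shape, with invented endpoints and without the deduplication and stats collection the real relay layers on top:

```python
import zmq.green as zmq

context = zmq.Context()

upstream = context.socket(zmq.SUB)        # listen to a gateway or another relay
upstream.setsockopt(zmq.SUBSCRIBE, b'')
upstream.connect('tcp://127.0.0.1:8500')  # invented upstream endpoint

downstream = context.socket(zmq.PUB)      # re-publish to our own subscribers
downstream.bind('tcp://*:9500')           # invented listener endpoint

while True:
    message = upstream.recv()
    downstream.send(message)              # repeat what we receive, verbatim
```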


@@ -1,12 +1,8 @@
-'''
-Created on 15 Nov 2014
-@author: james
-'''
+# coding: utf8
import argparse
import simplejson
-from eddn._Conf.Version import __version__ as version
+from eddn.conf.Version import __version__ as version
class _Settings(object):
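Settings.py pairs a class of defaults with a loadConfig() helper that can override them from a JSON file. A hypothetical sketch of that pattern; the attribute names and the --config flag are illustrative, not the module's actual contents:

```python
import argparse

import simplejson


class _Settings(object):
    # Illustrative defaults; the real module defines the EDDN-specific ones.
    MONITOR_DB = 'monitor.db'
    RELAY_DUPLICATE_MAX_MINUTES = 15


Settings = _Settings()


def loadConfig():
    """Overwrite the defaults with values from an optional JSON config file."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', default=None)  # hypothetical flag name
    args, _ = parser.parse_known_args()
    if args.config:
        with open(args.config) as handle:
            for key, value in simplejson.load(handle).items():
                setattr(Settings, key, value)
```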


@@ -1 +1,3 @@
+# coding: utf8
__version__ = "0.4"


@@ -1,10 +1,12 @@
+# coding: utf8
from datetime import datetime, timedelta
from threading import Lock, Thread
from time import sleep
import hashlib
import zlib
import simplejson
-from eddn._Conf.Settings import Settings, loadConfig
+from eddn.conf.Settings import Settings, loadConfig
class DuplicateMessages(Thread):
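DuplicateMessages backs the RELAY_DUPLICATE_MAX_MINUTES option used above: messages are hashed, a message seen again inside the window is reported as a duplicate, and a background thread expires old hashes. isDuplicated() matches the call made from the Monitor; the rest of this sketch (window length, expiry cadence, hash choice) is illustrative rather than the class's exact implementation:

```python
import hashlib
from datetime import datetime, timedelta
from threading import Lock, Thread
from time import sleep


class DuplicateMessages(Thread):
    max_minutes = 15  # stands in for Settings.RELAY_DUPLICATE_MAX_MINUTES

    def __init__(self):
        super(DuplicateMessages, self).__init__()
        self.daemon = True
        self.lock = Lock()
        self.seen = {}  # message hash -> last time seen

    def run(self):
        # Periodically drop hashes older than the duplicate window.
        while True:
            sleep(60)
            cutoff = datetime.utcnow() - timedelta(minutes=self.max_minutes)
            with self.lock:
                for key, when in list(self.seen.items()):
                    if when < cutoff:
                        del self.seen[key]

    def isDuplicated(self, message):
        if not isinstance(message, bytes):
            message = message.encode('utf8')
        key = hashlib.sha256(message).hexdigest()
        with self.lock:
            duplicate = key in self.seen
            self.seen[key] = datetime.utcnow()
            return duplicate
```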


@@ -1,3 +1,5 @@
+# coding: utf8
from collections import deque
from datetime import datetime
from itertools import islice
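StatsCollector's imports (deque, datetime, islice) suggest rolling per-interval counters. A hypothetical sketch of one way such a collector can be built; the real class's interface may well differ:

```python
from collections import deque
from itertools import islice
from threading import Lock


class RollingCounter(object):
    """Keep per-minute tallies and report totals over the last N minutes."""

    def __init__(self, history_minutes=60):
        self.lock = Lock()
        self.current = 0
        self.history = deque(maxlen=history_minutes)  # one bucket per minute

    def tally(self, amount=1):
        with self.lock:
            self.current += amount

    def rotate(self):
        """Call once a minute (e.g. from a Thread) to close out the bucket."""
        with self.lock:
            self.history.appendleft(self.current)
            self.current = 0

    def total(self, minutes):
        with self.lock:
            return sum(islice(self.history, 0, minutes))


counter = RollingCounter()
counter.tally(3)
counter.rotate()
print(counter.total(5))  # -> 3
```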


@@ -1,3 +1,5 @@
+# coding: utf8
import simplejson
from enum import IntEnum
from jsonschema import validate as jsValidate, ValidationError
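The Validator pairs jsonschema validation with an IntEnum severity, so the Gateway (which imports Validator and ValidationSeverity above) can distinguish fatal failures from warnings. A hypothetical sketch of that shape; the severity levels and method names are illustrative, not the module's exact API:

```python
import simplejson
from enum import IntEnum
from jsonschema import validate as jsValidate, ValidationError


class ValidationSeverity(IntEnum):
    OK = 0
    WARN = 1
    FATAL = 2


class Validator(object):
    def __init__(self):
        self.schemas = {}  # $schemaRef -> parsed JSON schema

    def addSchemaResource(self, schemaRef, schemaJson):
        self.schemas[schemaRef] = simplejson.loads(schemaJson)

    def validate(self, message):
        schemaRef = message.get('$schemaRef')
        if schemaRef not in self.schemas:
            return ValidationSeverity.WARN   # unknown schema: let it through
        try:
            jsValidate(message, self.schemas[schemaRef])
            return ValidationSeverity.OK
        except ValidationError:
            return ValidationSeverity.FATAL
```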