From 7a50cbc45f923bace1bb947310d14844d72b7e98 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 23 Sep 2021 19:45:29 +0000 Subject: [PATCH 01/15] schemas: codexentry: Remove extraneous comma --- schemas/codexentry-v1.0.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/codexentry-v1.0.json b/schemas/codexentry-v1.0.json index 0a100a4..98a4637 100644 --- a/schemas/codexentry-v1.0.json +++ b/schemas/codexentry-v1.0.json @@ -95,7 +95,7 @@ "minLength" : 1 }, "VoucherAmount": { - "type" : "integer", + "type" : "integer" }, "Traits": { "type" : "array", From 29e5bba91b0491e5d9018fd5cd177addf07fd079 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 23 Sep 2021 19:45:50 +0000 Subject: [PATCH 02/15] schemas: scanbarycentre: Remove extraneous ': "BodyID"' --- schemas/scanbarycentre-v1.0.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/scanbarycentre-v1.0.json b/schemas/scanbarycentre-v1.0.json index fc6e3a7..960d50d 100644 --- a/schemas/scanbarycentre-v1.0.json +++ b/schemas/scanbarycentre-v1.0.json @@ -35,7 +35,7 @@ "additionalProperties" : false, "required" : [ "timestamp", "event", "StarSystem", "StarPos", "SystemAddress", "BodyID" ], "properties" : { - "timestamp": , "BodyID"{ + "timestamp":{ "type" : "string", "format" : "date-time" }, From 9703b1e4757fba1d4b763f7a5391a4cc2ff546f1 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 26 Sep 2021 13:58:49 +0000 Subject: [PATCH 03/15] schemas: Correct 'id' in navbeaconscan-v1.0.json It still had 'fssdiscoveryscan' in there, presumably from copy-pasting. 
--- schemas/navbeaconscan-v1.0.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/navbeaconscan-v1.0.json b/schemas/navbeaconscan-v1.0.json index a2cb398..64d55bf 100644 --- a/schemas/navbeaconscan-v1.0.json +++ b/schemas/navbeaconscan-v1.0.json @@ -1,6 +1,6 @@ { "$schema" : "http://json-schema.org/draft-04/schema#", - "id" : "https://eddn.edcd.io/schemas/fssdiscoveryscan/1#", + "id" : "https://eddn.edcd.io/schemas/navbeaconscan/1#", "type" : "object", "additionalProperties" : false, "required": [ "$schemaRef", "header", "message" ], From d8d16afbc41e458480b3f67e52d64edabe625cf9 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 26 Sep 2021 15:29:02 +0000 Subject: [PATCH 04/15] schemas: Fix event name in navroute schema to be 'NavRoute' This was/is documented as 'Route', but it has always actually been 'NavRoute' in the produced log files, separate filename, and contents of that separate file. --- schemas/navroute-v1.0.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/navroute-v1.0.json b/schemas/navroute-v1.0.json index 98a4ade..321be80 100644 --- a/schemas/navroute-v1.0.json +++ b/schemas/navroute-v1.0.json @@ -39,7 +39,7 @@ "format" : "date-time" }, "event": { - "enum" : [ "Route" ] + "enum" : [ "NavRoute" ] }, "Route": { "type" : "array", From fa118d01832234294bae7423267f18fa59de213c Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 21 Dec 2021 09:37:08 +0000 Subject: [PATCH 05/15] schemas/codexentry: Clarify what an unset `status_body_name` means --- schemas/codexentry-README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schemas/codexentry-README.md b/schemas/codexentry-README.md index 1f3725e..58da99e 100644 --- a/schemas/codexentry-README.md +++ b/schemas/codexentry-README.md @@ -64,7 +64,7 @@ release, Update 7, plus one patch). away. 5. If Status.json does **not** have `BodyName` then clear `status_body_name`. 6. For a `CodexEntry` event: - 1. 
Check that `status_body_name` is set. If it is not, exit. + 1. Only if `status_body_name` is set: 1. Set the EDDN `codexentry` schema message `BodyName` to this value. 2. Check if it matches the `journal_body_name` value, and ONLY if they match, set `BodyID` in the EDDN `codexentry` @@ -121,4 +121,4 @@ So you might receive any of: 3. Both `BodyName` and `BodyID` keys present, with values. This SHOULD indicate a codex entry object which is on a body surface. -Adjust your local processing accordingly. \ No newline at end of file +Adjust your local processing accordingly. From 0e80c76cb564771465f61825e694227dcc3be312 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 6 Jan 2022 13:07:53 +0000 Subject: [PATCH 06/15] Gateway: Set bottle request limit to 1MiB --- src/eddn/Gateway.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/eddn/Gateway.py b/src/eddn/Gateway.py index 63d10d7..028e2f7 100644 --- a/src/eddn/Gateway.py +++ b/src/eddn/Gateway.py @@ -21,7 +21,9 @@ from eddn.core.Validator import Validator, ValidationSeverity from gevent import monkey monkey.patch_all() +import bottle from bottle import Bottle, run, request, response, get, post +bottle.BaseRequest.MEMFILE_MAX = 1024 * 1024 # 1MiB, default is/was 100KiB app = Bottle() logger = logging.getLogger(__name__) From 377bdd3833a721fc639376308745b17c2f248465 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 6 Jan 2022 13:54:33 +0000 Subject: [PATCH 07/15] Gateway: Add logging per Accepted request Actually some logging was already there, just the logger had never been set up properly, but then I decided to make the format of this message more useful. 
--- src/eddn/Gateway.py | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/src/eddn/Gateway.py b/src/eddn/Gateway.py index 028e2f7..93d22bc 100644 --- a/src/eddn/Gateway.py +++ b/src/eddn/Gateway.py @@ -27,6 +27,10 @@ bottle.BaseRequest.MEMFILE_MAX = 1024 * 1024 # 1MiB, default is/was 100KiB app = Bottle() logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +logger.addHandler(logging.StreamHandler()) +logger.info('Made logger') + # This socket is used to push market data out to the Announcers over ZeroMQ. context = zmq.Context() @@ -151,10 +155,25 @@ def parse_and_error_handle(data): # Sends the parsed message to the Relay/Monitor as compressed JSON. gevent.spawn(push_message, parsed_message, parsed_message['$schemaRef']) - logger.info("Accepted %s upload from %s" % ( - parsed_message, get_remote_address() - )) + + try: + logger.info('Accepted (%d, "%s", "%s", "%s", "%s", "%s") upload from %s' % ( + request.content_length, + parsed_message['header']['uploaderID'], + parsed_message['header']['softwareName'], + parsed_message['header']['softwareVersion'], + parsed_message['$schemaRef'], + parsed_message['message']['event'] if '/journal' in + parsed_message['$schemaRef'] else '-', + get_remote_address() + )) + + except Exception as e: + print('Logging of Accepted request failed: %s' % (e.message)) + pass + return 'OK' + else: response.status = 400 statsCollector.tally("invalid") From 1371f71217286c2cb2f26ea0f7040fa28919922b Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 6 Jan 2022 14:06:59 +0000 Subject: [PATCH 08/15] Gateway: Set up proper logger formatting For some reason the milliseconds portion of the %S timestamp is using a comma for decimals separator, despite 'locale' saying we're set to (US) English. 
/tableflip --- src/eddn/Gateway.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/eddn/Gateway.py b/src/eddn/Gateway.py index 93d22bc..9423c3b 100644 --- a/src/eddn/Gateway.py +++ b/src/eddn/Gateway.py @@ -28,7 +28,14 @@ app = Bottle() logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) -logger.addHandler(logging.StreamHandler()) +__logger_channel = logging.StreamHandler() +__logger_formatter = logging.Formatter( + '%(asctime)s - %(levelname)s - %(module)s:%(lineno)d: %(message)s' +) +__logger_formatter.default_time_format = '%Y-%m-%d %H:%M:%S' +__logger_formatter.default_msec_format = '%s.%03d' +__logger_channel.setFormatter(__logger_formatter) +logger.addHandler(__logger_channel) logger.info('Made logger') From 9f219da6a6cafba3d1b09fda2703270a401744cb Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 6 Jan 2022 17:36:39 +0000 Subject: [PATCH 09/15] Gateway: Expand on logging for interpretation of errors This includes logging the *full* (uncompressed) message if it fails to parse as JSON. 
--- src/eddn/Gateway.py | 94 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 83 insertions(+), 11 deletions(-) diff --git a/src/eddn/Gateway.py b/src/eddn/Gateway.py index 9423c3b..96cb90e 100644 --- a/src/eddn/Gateway.py +++ b/src/eddn/Gateway.py @@ -51,6 +51,37 @@ statsCollector = StatsCollector() statsCollector.start() +def extract_message_details(parsed_message): + uploader_id = '<>' + software_name = '<>' + software_version = '<>' + schema_ref = '<>' + journal_event = '<>' + + if 'header' in parsed_message: + if 'uploaderID' in parsed_message['header']: + uploader_id = parsed_message['header']['uploaderID'] + + if 'softwareName' in parsed_message['header']: + software_name = parsed_message['header']['softwareName'] + + if 'softwareVersion' in parsed_message['header']: + software_version = parsed_message['header']['softwareVersion'] + + if '$schemaRef' in parsed_message: + schema_ref = parsed_message['$schemaRef'] + + + if '/journal/' in schema_ref: + if 'message' in parsed_message: + if 'event' in parsed_message['message']: + journal_event = parsed_message['message']['event'] + + else: + journal_event = '-' + + return uploader_id, software_name, software_version, schema_ref, journal_event + def configure(): # Get the list of transports to bind from settings. This allows us to PUB # messages to multiple announcers over a variety of socket types @@ -144,9 +175,24 @@ def parse_and_error_handle(data): ) as exc: # Something bad happened. We know this will return at least a # semi-useful error message, so do so. 
+ try: + logger.error('Error - JSON parse failed (%d, "%s", "%s", "%s", "%s", "%s") from %s:\n%s\n' % ( + request.content_length, + '<>', + '<>', + '<>', + '<>', + '<>', + get_remote_address(), + data + )) + + except Exception as e: + print('Logging of "JSON parse failed" failed: %s' % (e.message)) + pass + response.status = 400 - logger.error("Error to %s: %s" % (get_remote_address(), exc.message)) - return str(exc) + return 'FAIL: ' + str(exc) # Here we check if an outdated schema has been passed if parsed_message["$schemaRef"] in Settings.GATEWAY_OUTDATED_SCHEMAS: @@ -164,14 +210,10 @@ def parse_and_error_handle(data): gevent.spawn(push_message, parsed_message, parsed_message['$schemaRef']) try: - logger.info('Accepted (%d, "%s", "%s", "%s", "%s", "%s") upload from %s' % ( + uploader_id, software_name, software_version, schema_ref, journal_event = extract_message_details(parsed_message) + logger.info('Accepted (%d, "%s", "%s", "%s", "%s", "%s") from %s' % ( request.content_length, - parsed_message['header']['uploaderID'], - parsed_message['header']['softwareName'], - parsed_message['header']['softwareVersion'], - parsed_message['$schemaRef'], - parsed_message['message']['event'] if '/journal' in - parsed_message['$schemaRef'] else '-', + uploader_id, software_name, software_version, schema_ref, journal_event, get_remote_address() )) @@ -182,6 +224,19 @@ def parse_and_error_handle(data): return 'OK' else: + try: + uploader_id, software_name, software_version, schema_ref, journal_event = extract_message_details(parsed_message) + logger.error('Failed Validation "%s" (%d, "%s", "%s", "%s", "%s", "%s") from %s' % ( + str(validationResults.messages), + request.content_length, + uploader_id, software_name, software_version, schema_ref, journal_event, + get_remote_address() + )) + + except Exception as e: + print('Logging of Failed Validation failed: %s' % (e.message)) + pass + response.status = 400 statsCollector.tally("invalid") return "FAIL: " + 
str(validationResults.messages) @@ -192,17 +247,34 @@ def upload(): try: # Body may or may not be compressed. message_body = get_decompressed_message() + except zlib.error as exc: # Some languages and libs do a crap job zlib compressing stuff. Provide # at least some kind of feedback for them to try to get pointed in # the correct direction. response.status = 400 - logger.error("gzip error with %s: %s" % (get_remote_address(), exc.message)) + try: + logger.error('gzip error (%d, "%s", "%s", "%s", "%s", "%s") from %s' % ( + request.content_length, + '<>', + '<>', + '<>', + '<>', + '<>', + get_remote_address() + )) + + except Exception as e: + print('Logging of "gzip error" failed: %s' % (e.message)) + pass + return exc.message + except MalformedUploadError as exc: # They probably sent an encoded POST, but got the key/val wrong. response.status = 400 - logger.error("Error to %s: %s" % (get_remote_address(), exc.message)) + logger.error("MalformedUploadError from %s: %s" % (get_remote_address(), exc.message)) + return exc.message statsCollector.tally("inbound") From 1134a6c9b46baf5704029e2ed3167661015c9a8f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 6 Jan 2022 17:39:01 +0000 Subject: [PATCH 10/15] Gateway: Only log first 512 characters of invalid JSON Don't want to spam the logs with up to 1MiB per bad message. 
--- src/eddn/Gateway.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eddn/Gateway.py b/src/eddn/Gateway.py index 96cb90e..9aee4f5 100644 --- a/src/eddn/Gateway.py +++ b/src/eddn/Gateway.py @@ -184,7 +184,7 @@ def parse_and_error_handle(data): '<>', '<>', get_remote_address(), - data + data[:512] )) except Exception as e: From d2c4c98c2bac6ac5120cf737cdb5daced2c10142 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 7 Jan 2022 15:20:05 +0000 Subject: [PATCH 11/15] docs: Possible server responses, and handling them --- schemas/README-EDDN-schemas.md | 105 ++++++++++++++++++++++++++++++++- 1 file changed, 104 insertions(+), 1 deletion(-) diff --git a/schemas/README-EDDN-schemas.md b/schemas/README-EDDN-schemas.md index 83f6742..6840a06 100644 --- a/schemas/README-EDDN-schemas.md +++ b/schemas/README-EDDN-schemas.md @@ -103,7 +103,8 @@ available from time to time as necessary, e.g. for testing new schemas or changes to existing ones. ### Sending data -To upload market data to EDDN, you'll need to make a POST request to the URL: +To upload market data to EDDN, you'll need to make a **POST** request to the +URL: * https://eddn.edcd.io:4430/upload/ @@ -114,6 +115,38 @@ The body of this is a JSON object, so you SHOULD set a `Content-Type` header of * `multipart/form-data` * `text/plain` +You *MAY* use gzip compression on the body of the message, but it is not +required. + +You should be prepared to handle all scenarios where sending of a message +fails: + +1. Connect refused. +2. Connection timed out. +3. Other possible responses as documented in + [Server responses](#server-responses). + +Carefully consider whether you should queue a 'failed' message for later +retry. In particular, you should ensure that one 'bad' message does not +block other messages from being successfully sent. + +You **MUST** wait some reasonable time (minimum 1 minute) before retrying +any failed message. 
+ +You **MUST NOT** retry any message that received a HTTP `400` or `426` code. +An exception can be made if, **and only if**, *you have manually verified that +you have fixed the issues with it (i.e. updated the schema/version to a +currently supported one and adjusted the data to fit that schema/version).* + +You **MAY** retry a message that initially received a `413` response (in +the hopes that the EDDN service admins decided to increase the maximum +allowed request size), but should not do so too quickly or in perpetuity. + +In general: + +- No data is better than bad data. +- Delayed good data is better than degrading the EDDN service for others. + ### Format of uploaded messages Each message is a JSON object in utf-8 encoding containing the following key+value pairs: @@ -202,6 +235,76 @@ the schemas enforce might not be explicitly called out here, so **do** check what you're sending against the schema when implementing sending new events. +### Server responses +There are three possible sources of HTTP responses when sending an upload +to EDDN. + +1. The reverse proxy that initially accepts the request. +2. The python `bottle` module that the Gateway uses to process the + forwarded requests. This might object to a message before the actual + EDDN code gets to process it at all. +3. The actual EDDN Gateway code. + +Once a message has cleared the EDDN Gateway then there is no mechanism for any +further issue (such as a message being detected as a duplicate in the +Monitor downstream of the Gateway) to be reported back to the sender. + +To state the obvious, if there are no issues with a request then an HTTP +200 response will be received by the sender. + +#### Reverse Proxy responses +In addition to generic "you typoed the URL" and other such "you just didn't +make a valid request" responses you might experience the following: + +1. 
`408` - `Request Timed Out` - the sender took too long to make/complete + its request and the reverse proxy rejected it as a result. +2. `503` - `Service Unavailable` - the EDDN Gateway process is either not + running, or not responding. + +#### bottle responses +1. `413` - `Payload Too Large` - `bottle` enforces a maximum request size + and the request exceeds that. As of 2022-01-07 the limit is 1MiB, and + pertains to the plain-text size, not after gzip compression if used. + To verify the current limit check for the line that looks like: + + ``` + bottle.BaseRequest.MEMFILE_MAX = 1024 * 1024 # 1MiB, default is/was 100KiB + ``` + + in + [src/eddn/Gateway.py](https://github.com/EDCD/EDDN/blob/master/src/eddn/Gateway.py), + as added in + [commit 0e80c76cb564771465f61825e694227dcc3be312](https://github.com/EDCD/EDDN/commit/0e80c76cb564771465f61825e694227dcc3be312). + +#### EDDN Gateway responses +1. `400` - `Bad Request` - this can be for a variety of reasons, and should + come with a response body with prefix `OK: ` or `FAIL: `: + 1. `FAIL: ` - the request couldn't be + parsed as valid JSON. e.g. + + ``` + FAIL: Expecting property name enclosed in double quotes: line 1 column 2 (char 1) + ``` + 2. `FAIL: ["]` - the JSON + message failed to pass schema validation. e.g. + + ``` + FAIL: [] + ``` + + 3. Other python exception message, e.g. if a message appeared to be + gzip compressed, but a failure was experienced when attempting to + decompress it. **NB: As of 2022-07-01 such messages won't have the + `FAIL: ` prefix.** + +2. `426` - `Upgrade Required` - You sent a message with an outdated + `$schemaRef` value. This could be either an old, deprecated version of + a schema, or an entirely deprecated schema. e.g. + + ``` + FAIL: The schema you have used is no longer supported. Please check for an updated version of your application. + ``` + ## Receiving messages EDDN provides a continuous stream of information from uploaders. 
To use this From 207068f156d630768d9b26be4e3dec14366b3e06 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 7 Jan 2022 15:37:07 +0000 Subject: [PATCH 12/15] docs: Cite issue on "some error bodies don't have FAIL: prefix" --- schemas/README-EDDN-schemas.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schemas/README-EDDN-schemas.md b/schemas/README-EDDN-schemas.md index 6840a06..1fc48de 100644 --- a/schemas/README-EDDN-schemas.md +++ b/schemas/README-EDDN-schemas.md @@ -295,7 +295,9 @@ make a valid request" responses you might experience the following: 3. Other python exception message, e.g. if a message appeared to be gzip compressed, but a failure was experienced when attempting to decompress it. **NB: As of 2022-07-01 such messages won't have the - `FAIL: ` prefix.** + `FAIL: ` prefix.** See + [#161 - Gateway: Improve reporting of 'misc' errors ](https://github.com/EDCD/EDDN/issues/161) + for any progress/resolution on this. 2. `426` - `Upgrade Required` - You sent a message with an outdated `$schemaRef` value. This could be either an old, deprecated version of From 36de2145d7d8c3c789c9b270032a1e050447da2e Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 9 Jan 2022 14:52:41 +0000 Subject: [PATCH 13/15] docs: schemas: General improvement pass to aid new developers * Make lots of 'obvious' things explicit, e.g. HTTP 1.1, not HTTP/2, and HTTPS not plain HTTP. * The live service should always be using the schemas as present in the live branch, not master or another branch. * A 'good' message will receive 'HTTP 200' status *and* a body of `OK`. --- schemas/README-EDDN-schemas.md | 74 +++++++++++++++++++++++----------- 1 file changed, 50 insertions(+), 24 deletions(-) diff --git a/schemas/README-EDDN-schemas.md b/schemas/README-EDDN-schemas.md index 1fc48de..172224a 100644 --- a/schemas/README-EDDN-schemas.md +++ b/schemas/README-EDDN-schemas.md @@ -98,30 +98,43 @@ value, e.g. 
"$schemaRef": "https://eddn.edcd.io/schemas/shipyard/2/test", You MUST also utilise these test forms of the schemas when first testing your -code. There might also be a beta.eddn.edcd.io, or dev.eddn.edcd.io, service +code. + +There might also be a beta.eddn.edcd.io, or dev.eddn.edcd.io, service available from time to time as necessary, e.g. for testing new schemas or -changes to existing ones. +changes to existing ones. Ask on the `#eddn` channel of the EDCD Discord +(see https://edcd.github.io/ for an invite link). + +Alternatively you could attempt +[running your own test instance of EDDN](../docs/Running-this-software.md). ### Sending data -To upload market data to EDDN, you'll need to make a **POST** request to the -URL: +Messages sent to EDDN **MUST**: -* https://eddn.edcd.io:4430/upload/ +- Use the URL: `https://eddn.edcd.io:4430/upload/`. Note the use of + TLS-encrypted HTTPS. A plain HTTP request will elicit a `400 Bad + Request` response. +- Use the HTTP 1.1 protocol. HTTP/2 is not supported at this time. +- Use a **POST** request, with the body containing the EDDN message. No + query parameters in the URL are supported or necessary. -The body of this is a JSON object, so you SHOULD set a `Content-Type` header of -`applicaton/json`, and NOT any of: +The body of an EDDN message is a JSON object in UTF-8 encoding. You SHOULD +set a `Content-Type` header of `applicaton/json`, and NOT any of: * `application/x-www-form-urlencoded` * `multipart/form-data` * `text/plain` +For historical reasons URL form-encoded data *is* supported, **but this is +deprecated and no new software should attempt this method**. + You *MAY* use gzip compression on the body of the message, but it is not required. You should be prepared to handle all scenarios where sending of a message fails: -1. Connect refused. +1. Connection refused. 2. Connection timed out. 3. Other possible responses as documented in [Server responses](#server-responses). 
@@ -145,10 +158,10 @@ allowed request size), but should not do so too quickly or in perpetuity. In general: - No data is better than bad data. -- Delayed good data is better than degrading the EDDN service for others. +- *Delayed* good data is better than degrading the EDDN service for others. ### Format of uploaded messages -Each message is a JSON object in utf-8 encoding containing the following +Each message is a JSON object in UTF-8 encoding containing the following key+value pairs: 1. `$schemaRef` - Which schema (including version) this message is for. @@ -202,10 +215,17 @@ For example, a shipyard message, version 2, might look like: ``` ### Contents of `message` +Every message MUST comply with the schema its `$schemaRef` value cites. + +Apart from short time windows during deployment of a new version the live +EDDN service should always be using +[the schemas as present in the live branch](https://github.com/EDCD/EDDN/tree/live/schemas). +So, be sure you're checking those and not, e.g. those in the `master` or +other branches. Each `message` object must have, at bare minimum: -1. `timestamp` - string date and time in ISO8601 format. Whilst that +1. `timestamp` - string date and time in ISO8601 format. Whilst this technically allows for any timezone to be cited you SHOULD provide this in UTC, aka 'Zulu Time' as in the example above. You MUST ensure that you are doing this properly. Do not claim 'Z' whilst actually using a local time @@ -213,27 +233,32 @@ Each `message` object must have, at bare minimum: Listeners MAY make decisions on accepting data based on this time stamp, i.e. "too old". -2. One other key/value pair representing the data. In general there will be - much more than this. Again, consult the +2. At least one other key/value pair representing the data. In general there + will be much more than this. Consult the [schemas and their documentation](./). 
-Note that many of the key names chosen in the schemas are based on the CAPI -data, not Journal events, because the CAPI came first. This means renaming -many of the keys from Journal events to match the schema. +Because the first versions of some schemas were defined when only the CAPI +data was available, before Journal files existed, many of the key names chosen +in the schemas are based on the equivalent in CAPI data, not Journal events. +This means you MUST rename many of the keys from Journal events to match the +schemas. EDDN is intended to transport generic data not specific to any particular Cmdr -and to reflect the data that a player would see in-game in station services or -the local map. To that end, uploading applications MUST ensure that messages do -not contain any Cmdr-specific data (other than "uploaderID" and the "horizons" -flag). +and to reflect only the data that every player would see in-game in station +services or the local map. To that end, uploading applications MUST ensure +that messages do not contain any Cmdr-specific data (other than "uploaderID", +the "horizons" flag, and the "odyssey" flag). The individual schemas will instruct you on various elisions (removals) to be made to comply with this. Some of these requirements are also enforced by the schemas, and some things -the schemas enforce might not be explicitly called out here, so **do** -check what you're sending against the schema when implementing sending new -events. +the schemas enforce might not be explicitly called out here. So, **do** +check what you're sending against the relevant schema(s) when making any +changes to your code. + +It is also advisable to Watch this repository on GitHub so as to be aware +of any changes to schemas. 
### Server responses There are three possible sources of HTTP responses when sending an upload @@ -250,7 +275,8 @@ further issue (such as a message being detected as a duplicate in the Monitor downstream of the Gateway) to be reported back to the sender. To state the obvious, if there are no issues with a request then an HTTP -200 response will be received by the sender. +200 response will be received by the sender. The body of the response +should be the string `OK`. #### Reverse Proxy responses In addition to generic "you typoed the URL" and other such "you just didn't From 44b5a1d789ecef5a1246a6e2c2089a97bdccb079 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 9 Jan 2022 14:57:05 +0000 Subject: [PATCH 14/15] docs/schemas: slight wording tweak about live schemas --- schemas/README-EDDN-schemas.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schemas/README-EDDN-schemas.md b/schemas/README-EDDN-schemas.md index 172224a..1c51fc8 100644 --- a/schemas/README-EDDN-schemas.md +++ b/schemas/README-EDDN-schemas.md @@ -220,8 +220,8 @@ Every message MUST comply with the schema its `$schemaRef` value cites. Apart from short time windows during deployment of a new version the live EDDN service should always be using [the schemas as present in the live branch](https://github.com/EDCD/EDDN/tree/live/schemas). -So, be sure you're checking those and not, e.g. those in the `master` or -other branches. +So, be sure you're checking the live versions and not, e.g. those in the +`master` or other branches. 
Each `message` object must have, at bare minimum: From c6a63c5a9339ce96bd96dd3cd9b7f15bdf66a200 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 9 Jan 2022 15:25:55 +0000 Subject: [PATCH 15/15] docs/schemas: Journal timestamps *are* trustworthy as UTC --- schemas/README-EDDN-schemas.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/schemas/README-EDDN-schemas.md b/schemas/README-EDDN-schemas.md index 1c51fc8..0a1ead9 100644 --- a/schemas/README-EDDN-schemas.md +++ b/schemas/README-EDDN-schemas.md @@ -231,6 +231,13 @@ Each `message` object must have, at bare minimum: doing this properly. Do not claim 'Z' whilst actually using a local time that is offset from UTC. + If you are only utilising Journal-sourced data then simply using the + value from there should be sufficient as the PC game client is meant to + always be correctly citing UTC for this. Indeed it has been observed, + in the Odyssey 4.0.0.1002 client, that with the Windows clock behind UTC + by 21 seconds both the in-game UI clock *and* the Journal event + timestamps are still properly UTC to the nearest second. + Listeners MAY make decisions on accepting data based on this time stamp, i.e. "too old". 2. At least one other key/value pair representing the data. In general there