From 7ca650ba5bddd2d01fd700d3a29f0b10ebf0a50f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 26 Sep 2022 12:46:28 +0100 Subject: [PATCH 01/95] develop: post-release version bump to 5.5.1-alpha0 --- config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/__init__.py b/config/__init__.py index 73d55975..cc125129 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -52,7 +52,7 @@ appcmdname = 'EDMC' # # Major.Minor.Patch(-prerelease)(+buildmetadata) # NB: Do *not* import this, use the functions appversion() and appversion_nobuild() -_static_appversion = '5.5.0' +_static_appversion = '5.5.1-alpha0' _cached_version: Optional[semantic_version.Version] = None copyright = '© 2015-2019 Jonathan Harris, 2020-2022 EDCD' From 765658a7fb24b92340a466df800e8df949bf8015 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Sep 2022 17:07:05 +0000 Subject: [PATCH 02/95] build(deps): bump certifi from 2022.9.14 to 2022.9.24 Bumps [certifi](https://github.com/certifi/python-certifi) from 2022.9.14 to 2022.9.24. - [Release notes](https://github.com/certifi/python-certifi/releases) - [Commits](https://github.com/certifi/python-certifi/compare/2022.09.14...2022.09.24) --- updated-dependencies: - dependency-name: certifi dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 04102949..d014f67c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -certifi==2022.9.14 +certifi==2022.9.24 requests==2.28.1 watchdog==2.1.9 # Commented out because this doesn't package well with py2exe From 761392c5a4ffcba0e45bee69dbae5c2fa0e3e2a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Sep 2022 17:07:13 +0000 Subject: [PATCH 03/95] build(deps-dev): bump setuptools from 65.3.0 to 65.4.0 Bumps [setuptools](https://github.com/pypa/setuptools) from 65.3.0 to 65.4.0. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v65.3.0...v65.4.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7fb4f90f..215926c9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ wheel # We can't rely on just picking this up from either the base (not venv), # or venv-init-time version. Specify here so that dependabot will prod us # about new versions. -setuptools==65.3.0 +setuptools==65.4.0 # Static analysis tools flake8==5.0.4 From 2492bdcaeaee64927ebf9f5f8e21dfdac7aea407 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Sep 2022 17:03:34 +0000 Subject: [PATCH 04/95] build(deps-dev): bump mypy from 0.971 to 0.981 Bumps [mypy](https://github.com/python/mypy) from 0.971 to 0.981. 
- [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v0.971...v0.981) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7fb4f90f..d931165f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -20,7 +20,7 @@ flake8-noqa==1.2.9 flake8-polyfill==1.0.2 flake8-use-fstring==1.4 -mypy==0.971 +mypy==0.981 pep8-naming==0.13.2 safety==2.2.0 types-requests==2.28.11 From 8095b9adc9adb820ea390cc7f278b83cb2c810f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Sep 2022 17:04:28 +0000 Subject: [PATCH 05/95] build(deps-dev): bump pytest-cov from 3.0.0 to 4.0.0 Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 3.0.0 to 4.0.0. - [Release notes](https://github.com/pytest-dev/pytest-cov/releases) - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v3.0.0...v4.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 1fa5e0ee..6fd38c97 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -41,7 +41,7 @@ py2exe==0.12.0.1; sys_platform == 'win32' # Testing pytest==7.1.3 -pytest-cov==3.0.0 # Pytest code coverage support +pytest-cov==4.0.0 # Pytest code coverage support coverage[toml]==6.4.4 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs # For manipulating folder permissions and the like. pywin32==304; sys_platform == 'win32' From a38afbf2ac8a091027dde2e65184442dae2d079f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Sep 2022 17:03:50 +0000 Subject: [PATCH 06/95] build(deps-dev): bump coverage[toml] from 6.4.4 to 6.5.0 Bumps [coverage[toml]](https://github.com/nedbat/coveragepy) from 6.4.4 to 6.5.0. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/6.4.4...6.5.0) --- updated-dependencies: - dependency-name: coverage[toml] dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 6fd38c97..cb032ce9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -42,7 +42,7 @@ py2exe==0.12.0.1; sys_platform == 'win32' # Testing pytest==7.1.3 pytest-cov==4.0.0 # Pytest code coverage support -coverage[toml]==6.4.4 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs +coverage[toml]==6.5.0 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs # For manipulating folder permissions and the like. 
pywin32==304; sys_platform == 'win32' From 32ba10400812da5f1c3562167a1000fc2a438572 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Sep 2022 17:04:02 +0000 Subject: [PATCH 07/95] build(deps-dev): bump setuptools from 65.4.0 to 65.4.1 Bumps [setuptools](https://github.com/pypa/setuptools) from 65.4.0 to 65.4.1. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v65.4.0...v65.4.1) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 6fd38c97..235b6143 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ wheel # We can't rely on just picking this up from either the base (not venv), # or venv-init-time version. Specify here so that dependabot will prod us # about new versions. -setuptools==65.4.0 +setuptools==65.4.1 # Static analysis tools flake8==5.0.4 From 1b0bbb9a560fea11f71f567384ffd52375d4ef7c Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 30 Sep 2022 19:25:22 +0100 Subject: [PATCH 08/95] tests: Improved coverage reporting * Always report on coverage, if no tests failed. * Remove `.coveragerc`, in favour of `pyproject.toml`. * Use `coverage-conditional-plugin`: - Two rules added, `sys-platform-win32` and `sys-platform-not-win32`. - Those rules used so non-win32 code run on win32 doesn't cause coverage to be reported as less than 100%. There's the assumption that !win32 means Linux, probably. 
--- .coveragerc | 6 ------ journal_lock.py | 8 ++++---- pyproject.toml | 9 ++++++++- requirements-dev.txt | 1 + 4 files changed, 13 insertions(+), 11 deletions(-) delete mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 019facb4..00000000 --- a/.coveragerc +++ /dev/null @@ -1,6 +0,0 @@ -[run] -omit = - # The tests themselves - tests/* - # Any venv files - venv/* diff --git a/journal_lock.py b/journal_lock.py index 91a7895f..b96fd9e9 100644 --- a/journal_lock.py +++ b/journal_lock.py @@ -94,7 +94,7 @@ class JournalLock: :return: LockResult - See the class Enum definition """ - if sys.platform == 'win32': + if sys.platform == 'win32': # pragma: sys-platform-not-win32 logger.trace_if('journal-lock', 'win32, using msvcrt') # win32 doesn't have fcntl, so we have to use msvcrt import msvcrt @@ -107,7 +107,7 @@ class JournalLock: f", assuming another process running: {e!r}") return JournalLockResult.ALREADY_LOCKED - else: # pytest coverage only sees this on !win32 + else: # pragma: sys-platform-win32 logger.trace_if('journal-lock', 'NOT win32, using fcntl') try: import fcntl @@ -143,7 +143,7 @@ class JournalLock: return True # We weren't locked, and still aren't unlocked = False - if sys.platform == 'win32': + if sys.platform == 'win32': # pragma: sys-platform-not-win32 logger.trace_if('journal-lock', 'win32, using msvcrt') # win32 doesn't have fcntl, so we have to use msvcrt import msvcrt @@ -160,7 +160,7 @@ class JournalLock: else: unlocked = True - else: # pytest coverage only sees this on !win32 + else: # pragma: sys-platform-win32 logger.trace_if('journal-lock', 'NOT win32, using fcntl') try: import fcntl diff --git a/pyproject.toml b/pyproject.toml index a7f41ad4..387f00c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,9 +7,16 @@ line_length = 119 [tool.pytest.ini_options] testpaths = ["tests"] # Search for tests in tests/ +addopts = "--cov . --cov plugins --cov-report=term-missing --no-cov-on-fail" +# --cov-fail-under 80" [tool.coverage.run] -omit = ["venv/*"] # when running pytest --cov, dont report coverage in venv directories +omit = [ "tests/*", "venv/*", "dist.win32/*" ] +plugins = [ "coverage_conditional_plugin" ] + +[tool.coverage.coverage_conditional_plugin.rules] +sys-platform-win32 = "sys_platform == 'win32'" +sys-platform-not-win32 = "sys_platform != 'win32'" [tool.pyright] # pythonPlatform = 'Darwin' diff --git a/requirements-dev.txt b/requirements-dev.txt index a9f569f5..2ae38e0a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -43,6 +43,7 @@ py2exe==0.12.0.1; sys_platform == 'win32' pytest==7.1.3 pytest-cov==4.0.0 # Pytest code coverage support coverage[toml]==6.5.0 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs +coverage-conditional-plugin==0.7.0 # For manipulating folder permissions and the like. pywin32==304; sys_platform == 'win32' From 5efd27a83cbabdc716f096c1cc6fd069460f2f55 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 30 Sep 2022 19:45:12 +0100 Subject: [PATCH 09/95] tests: Attempt to fix config/ coverage * Define `darwin` and `linux` *and* "platform known" pragmas. * Use per-platform pragmas in `config/__init__.py` selection of implementation. * Attempt, and fail, to use pragma in `config/darwin.py` to ignore it on other platforms. 
--- config/__init__.py | 8 ++++---- config/darwin.py | 4 ++++ pyproject.toml | 5 +++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index cc125129..6f5bfa12 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -454,19 +454,19 @@ def get_config(*args, **kwargs) -> AbstractConfig: :param kwargs: Args to be passed through to implementation. :return: Instance of the implementation. """ - if sys.platform == "darwin": + if sys.platform == "darwin": # pragma: sys-platform-not-darwin from .darwin import MacConfig return MacConfig(*args, **kwargs) - elif sys.platform == "win32": + elif sys.platform == "win32": # pragma: sys-platform-not-win32 from .windows import WinConfig return WinConfig(*args, **kwargs) - elif sys.platform == "linux": + elif sys.platform == "linux": # pragma: sys-platform-not-linux from .linux import LinuxConfig return LinuxConfig(*args, **kwargs) - else: + else: # pragma: sys-platform-known raise ValueError(f'Unknown platform: {sys.platform=}') diff --git a/config/darwin.py b/config/darwin.py index eb2b887f..a492b8a1 100644 --- a/config/darwin.py +++ b/config/darwin.py @@ -1,3 +1,7 @@ +"""Darwin/macOS implementation of AbstractConfig.""" +# This doesn't actually work: +# +# pragma: sys-platform-not-darwin import pathlib import sys from typing import Any, Dict, List, Union diff --git a/pyproject.toml b/pyproject.toml index 387f00c7..bf2fcc52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,11 @@ plugins = [ "coverage_conditional_plugin" ] [tool.coverage.coverage_conditional_plugin.rules] sys-platform-win32 = "sys_platform == 'win32'" sys-platform-not-win32 = "sys_platform != 'win32'" +sys-platform-darwin = "sys_platform == 'darwin'" +sys-platform-not-darwin = "sys_platform != 'darwin'" +sys-platform-linux = "sys_platform == 'linux'" +sys-platform-not-linux = "sys_platform != 'linux'" +sys-platform-known = "sys_platform in ('darwin', 'linux', 'win32')" [tool.pyright] # pythonPlatform = 'Darwin' From baf62f03fd5cef12fa7bb9fbc2239e7f93450042 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 2 Oct 2022 12:11:16 +0100 Subject: [PATCH 10/95] pytest/coverage: Resolve the "which way around to have pragmas" issue 1. You end up either inverting the sense of a `coverage_conditional_plugin` pragma's name (versus what it actually tests), *or* where you put it in the code. 2. As the pragmas are only defined in once, in one place, it's better to invert the sense there, rather than in *every single use case*. Then technically any 'other' branch isn't guaranteed to --- config/__init__.py | 8 ++++---- journal_lock.py | 8 ++++---- pyproject.toml | 16 +++++++++------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index 6f5bfa12..56679019 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -454,19 +454,19 @@ def get_config(*args, **kwargs) -> AbstractConfig: :param kwargs: Args to be passed through to implementation. :return: Instance of the implementation. 
""" - if sys.platform == "darwin": # pragma: sys-platform-not-darwin + if sys.platform == "darwin": # pragma: sys-platform-darwin from .darwin import MacConfig return MacConfig(*args, **kwargs) - elif sys.platform == "win32": # pragma: sys-platform-not-win32 + elif sys.platform == "win32": # pragma: sys-platform-win32 from .windows import WinConfig return WinConfig(*args, **kwargs) - elif sys.platform == "linux": # pragma: sys-platform-not-linux + elif sys.platform == "linux": # pragma: sys-platform-linux from .linux import LinuxConfig return LinuxConfig(*args, **kwargs) - else: # pragma: sys-platform-known + else: # pragma: sys-platform-not-known raise ValueError(f'Unknown platform: {sys.platform=}') diff --git a/journal_lock.py b/journal_lock.py index b96fd9e9..ef5cf983 100644 --- a/journal_lock.py +++ b/journal_lock.py @@ -94,7 +94,7 @@ class JournalLock: :return: LockResult - See the class Enum definition """ - if sys.platform == 'win32': # pragma: sys-platform-not-win32 + if sys.platform == 'win32': # pragma: sys-platform-win32 logger.trace_if('journal-lock', 'win32, using msvcrt') # win32 doesn't have fcntl, so we have to use msvcrt import msvcrt @@ -107,7 +107,7 @@ class JournalLock: f", assuming another process running: {e!r}") return JournalLockResult.ALREADY_LOCKED - else: # pragma: sys-platform-win32 + else: # pragma: sys-platform-not-win32 logger.trace_if('journal-lock', 'NOT win32, using fcntl') try: import fcntl @@ -143,7 +143,7 @@ class JournalLock: return True # We weren't locked, and still aren't unlocked = False - if sys.platform == 'win32': # pragma: sys-platform-not-win32 + if sys.platform == 'win32': # pragma: sys-platform-win32 logger.trace_if('journal-lock', 'win32, using msvcrt') # win32 doesn't have fcntl, so we have to use msvcrt import msvcrt @@ -160,7 +160,7 @@ class JournalLock: else: unlocked = True - else: # pragma: sys-platform-win32 + else: # pragma: sys-platform-not-win32 logger.trace_if('journal-lock', 'NOT win32, using fcntl') try: import fcntl diff --git a/pyproject.toml b/pyproject.toml index bf2fcc52..7086d166 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,13 +15,15 @@ omit = [ "tests/*", "venv/*", "dist.win32/*" ] plugins = [ "coverage_conditional_plugin" ] [tool.coverage.coverage_conditional_plugin.rules] -sys-platform-win32 = "sys_platform == 'win32'" -sys-platform-not-win32 = "sys_platform != 'win32'" -sys-platform-darwin = "sys_platform == 'darwin'" -sys-platform-not-darwin = "sys_platform != 'darwin'" -sys-platform-linux = "sys_platform == 'linux'" -sys-platform-not-linux = "sys_platform != 'linux'" -sys-platform-known = "sys_platform in ('darwin', 'linux', 'win32')" +# Yes, the sense of all of these is inverted, because else it ends up +# inverted at *every* use. 
+sys-platform-win32 = "sys_platform != 'win32'" +sys-platform-not-win32 = "sys_platform == 'win32'" +sys-platform-darwin = "sys_platform != 'darwin'" +sys-platform-not-darwin = "sys_platform == 'darwin'" +sys-platform-linux = "sys_platform != 'linux'" +sys-platform-not-linux = "sys_platform == 'linux'" +sys-platform-not-known = "sys_platform in ('darwin', 'linux', 'win32')" [tool.pyright] # pythonPlatform = 'Darwin' From 7aa832e3d10122f788394e4792453094ce4814d5 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 2 Oct 2022 12:31:11 +0100 Subject: [PATCH 11/95] Contributing.md: test coverage notes --- Contributing.md | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/Contributing.md b/Contributing.md index 4d7fc9fc..6d60065a 100644 --- a/Contributing.md +++ b/Contributing.md @@ -244,6 +244,42 @@ handy if you want to step through the testing code to be sure of anything. Otherwise, see the [pytest documentation](https://docs.pytest.org/en/stable/contents.html). +### Test Coverage +As we work towards actually having tests for as much of the code as possible +it is useful to monitor the current test coverage. + +Running `pytest` will also produce the overall coverage report, see the +configured options in `pyproject.toml`. + +One issue you might run into is where there is code that only runs on one +platform. By default `pytest-cov`/`coverage` will count this code as not +tested when run on a different platform. We utilise the +`coverage-conditional-plugin` module so that `#pragma` comments can be used +to give hints to coverage about this. + +The pragmas are defined in the +`tool.coverage.coverage_conditional_plugin.rules` section of `pyproject.toml`, +e.g. + +```toml +[tool.coverage.coverage_conditional_plugin.rules] +# Yes, the sense of all of these is inverted, because else it ends up +# inverted at *every* use. +sys-platform-win32 = "sys_platform != 'win32'" +... +``` +And are used as in: +```python +import sys + +if sys.platform == 'win32': # pragma: sys-platform-win32 + ... +else: # pragma: sys-platform-not-win32 + ... +``` +Note the inverted sense of the pragma definitions, as the comments cause +`coverage` to *not* consider that code block on this platform. + --- ## Imports used only in core plugins From 80a8a32666808dc875b0d8bc00a2c0670a52c4cb Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 2 Oct 2022 12:38:24 +0100 Subject: [PATCH 12/95] pytest/coverage: Improve the .toml comment about pragma 'inversion' --- Contributing.md | 8 ++++++-- pyproject.toml | 6 ++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/Contributing.md b/Contributing.md index 6d60065a..29135804 100644 --- a/Contributing.md +++ b/Contributing.md @@ -263,8 +263,6 @@ e.g. ```toml [tool.coverage.coverage_conditional_plugin.rules] -# Yes, the sense of all of these is inverted, because else it ends up -# inverted at *every* use. sys-platform-win32 = "sys_platform != 'win32'" ... ``` @@ -280,6 +278,12 @@ else: # pragma: sys-platform-not-win32 Note the inverted sense of the pragma definitions, as the comments cause `coverage` to *not* consider that code block on this platform. +As of 2022-10-02 and `coverage-conditional-plugin==0.7.0` there is no way to +signal that an entire file should be excluded from coverage reporting on the +current platform. See +[this GitHub issue comment](https://github.com/wemake-services/coverage-conditional-plugin/issues/2#issuecomment-1263918296) +. 
+ --- ## Imports used only in core plugins diff --git a/pyproject.toml b/pyproject.toml index 7086d166..bc3a32b0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,10 @@ omit = [ "tests/*", "venv/*", "dist.win32/*" ] plugins = [ "coverage_conditional_plugin" ] [tool.coverage.coverage_conditional_plugin.rules] -# Yes, the sense of all of these is inverted, because else it ends up -# inverted at *every* use. +# NB: The name versus content of all of these are inverted because of the way +# they're used. When a pragma cites one it causes that code block to +# **NOT** be considered for code coverage. +# See Contributin.md#test-coverage for more details. sys-platform-win32 = "sys_platform != 'win32'" sys-platform-not-win32 = "sys_platform == 'win32'" sys-platform-darwin = "sys_platform != 'darwin'" From bf7be4dc5ae55d7351bfe0c70569ba83d31f4122 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 2 Oct 2022 12:39:08 +0100 Subject: [PATCH 13/95] config/darwin: Remove non-functional coverage pragma --- config/darwin.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/config/darwin.py b/config/darwin.py index a492b8a1..895218a8 100644 --- a/config/darwin.py +++ b/config/darwin.py @@ -1,7 +1,4 @@ """Darwin/macOS implementation of AbstractConfig.""" -# This doesn't actually work: -# -# pragma: sys-platform-not-darwin import pathlib import sys from typing import Any, Dict, List, Union From eceaa4f0be4fb89229430a5eb32ec42205886df0 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 2 Oct 2022 19:46:27 +0100 Subject: [PATCH 14/95] pyproject.toml: Fix typoed 'Contributing.md' --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bc3a32b0..91c6e7e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ plugins = [ "coverage_conditional_plugin" ] # NB: The name versus content of all of these are inverted because of the way # they're used. When a pragma cites one it causes that code block to # **NOT** be considered for code coverage. -# See Contributin.md#test-coverage for more details. +# See Contributing.md#test-coverage for more details. sys-platform-win32 = "sys_platform != 'win32'" sys-platform-not-win32 = "sys_platform == 'win32'" sys-platform-darwin = "sys_platform != 'darwin'" From b34cb46c41fce5e1e0d5f188885ccb545f2c1122 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 17:03:57 +0000 Subject: [PATCH 15/95] build(deps-dev): bump mypy from 0.981 to 0.982 Bumps [mypy](https://github.com/python/mypy) from 0.981 to 0.982. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v0.981...v0.982) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2ae38e0a..081812e0 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -20,7 +20,7 @@ flake8-noqa==1.2.9 flake8-polyfill==1.0.2 flake8-use-fstring==1.4 -mypy==0.981 +mypy==0.982 pep8-naming==0.13.2 safety==2.2.0 types-requests==2.28.11 From c1b18389172f70da935ab96fa45771b38e30471d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 4 Oct 2022 17:04:01 +0000 Subject: [PATCH 16/95] build(deps-dev): bump types-requests from 2.28.11 to 2.28.11.1 Bumps [types-requests](https://github.com/python/typeshed) from 2.28.11 to 2.28.11.1. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 081812e0..3865c7d2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -23,7 +23,7 @@ flake8-use-fstring==1.4 mypy==0.982 pep8-naming==0.13.2 safety==2.2.0 -types-requests==2.28.11 +types-requests==2.28.11.1 # Code formatting tools autopep8==1.7.0 From 7c80b9adc6fbf9a44928e34a86d3e3d135316cbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Oct 2022 17:11:42 +0000 Subject: [PATCH 17/95] build(deps-dev): bump safety from 2.2.0 to 2.2.1 Bumps [safety](https://github.com/pyupio/safety) from 2.2.0 to 2.2.1. - [Release notes](https://github.com/pyupio/safety/releases) - [Changelog](https://github.com/pyupio/safety/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyupio/safety/compare/2.2.0...2.2.1) --- updated-dependencies: - dependency-name: safety dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 3865c7d2..b69805e9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -22,7 +22,7 @@ flake8-use-fstring==1.4 mypy==0.982 pep8-naming==0.13.2 -safety==2.2.0 +safety==2.2.1 types-requests==2.28.11.1 # Code formatting tools From ac55aca3e76b660f6db24e077128bcf6f112e5b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Oct 2022 17:05:02 +0000 Subject: [PATCH 18/95] build(deps-dev): bump safety from 2.2.1 to 2.3.1 Bumps [safety](https://github.com/pyupio/safety) from 2.2.1 to 2.3.1. - [Release notes](https://github.com/pyupio/safety/releases) - [Changelog](https://github.com/pyupio/safety/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyupio/safety/compare/2.2.1...2.3.1) --- updated-dependencies: - dependency-name: safety dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index b69805e9..1a392c42 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -22,7 +22,7 @@ flake8-use-fstring==1.4 mypy==0.982 pep8-naming==0.13.2 -safety==2.2.1 +safety==2.3.1 types-requests==2.28.11.1 # Code formatting tools From 0abecba136a1ca1e0b63676563c89c400e9299f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 10:02:54 +0000 Subject: [PATCH 19/95] build(deps-dev): bump types-requests from 2.28.11.1 to 2.28.11.2 Bumps [types-requests](https://github.com/python/typeshed) from 2.28.11.1 to 2.28.11.2. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 1a392c42..7b1f9001 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -23,7 +23,7 @@ flake8-use-fstring==1.4 mypy==0.982 pep8-naming==0.13.2 safety==2.3.1 -types-requests==2.28.11.1 +types-requests==2.28.11.2 # Code formatting tools autopep8==1.7.0 From c8cd02e47e61b890508290f31fcba57da6750938 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 17:06:35 +0000 Subject: [PATCH 20/95] build(deps-dev): bump flake8-isort from 4.2.0 to 5.0.0 Bumps [flake8-isort](https://github.com/gforcada/flake8-isort) from 4.2.0 to 5.0.0. - [Release notes](https://github.com/gforcada/flake8-isort/releases) - [Changelog](https://github.com/gforcada/flake8-isort/blob/master/CHANGES.rst) - [Commits](https://github.com/gforcada/flake8-isort/compare/4.2.0...5.0.0) --- updated-dependencies: - dependency-name: flake8-isort dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7b1f9001..c1f09d4b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,7 +14,7 @@ flake8-cognitive-complexity==0.1.0 flake8-comprehensions==3.10.0 flake8-docstrings==1.6.0 isort==5.10.1 -flake8-isort==4.2.0 +flake8-isort==5.0.0 flake8-json==21.7.0 flake8-noqa==1.2.9 flake8-polyfill==1.0.2 From be2b47d4faf0cb165f345b4ed0fae0f7f97683e3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Oct 2022 17:03:09 +0000 Subject: [PATCH 21/95] build(deps-dev): bump setuptools from 65.4.1 to 65.5.0 Bumps [setuptools](https://github.com/pypa/setuptools) from 65.4.1 to 65.5.0. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v65.4.1...v65.5.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index c1f09d4b..31b3b23d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ wheel # We can't rely on just picking this up from either the base (not venv), # or venv-init-time version. Specify here so that dependabot will prod us # about new versions. -setuptools==65.4.1 +setuptools==65.5.0 # Static analysis tools flake8==5.0.4 From 6f5e7aa8bc077760579afa4884799737965bfedb Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 18 Oct 2022 14:27:33 +0100 Subject: [PATCH 22/95] python: Bump to version 3.10.8 --- .python-version | 2 +- ChangeLog.md | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.python-version b/.python-version index 1281604a..36435ac6 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.10.7 +3.10.8 diff --git a/ChangeLog.md b/ChangeLog.md index efb069cc..4c04a484 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -9,7 +9,7 @@ produce the Windows executables and installer. --- -* We now test against, and package with, Python 3.10.7. +* We now test against, and package with, Python 3.10.8. **As a consequence of this we no longer support Windows 7. This is due to @@ -27,6 +27,12 @@ produce the Windows executables and installer. --- +Pre-Release 5.5.1-alpha0 +=== +* We now test against, and package with, Python 3.10.8. + +--- + Release 5.5.0 === From b8471ab5453b870bf3a294b159209c1fe3210fba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Oct 2022 17:05:09 +0000 Subject: [PATCH 23/95] build(deps-dev): bump pytest from 7.1.3 to 7.2.0 Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.1.3 to 7.2.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.3...7.2.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 31b3b23d..2871023b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -40,7 +40,7 @@ lxml==4.9.1 py2exe==0.12.0.1; sys_platform == 'win32' # Testing -pytest==7.1.3 +pytest==7.2.0 pytest-cov==4.0.0 # Pytest code coverage support coverage[toml]==6.5.0 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs coverage-conditional-plugin==0.7.0 From efaa648b3edba53af8de65bbf53b3b06c4bdec86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 17:04:39 +0000 Subject: [PATCH 24/95] build(deps-dev): bump py2exe from 0.12.0.1 to 0.12.0.2 Bumps [py2exe](https://github.com/py2exe/py2exe) from 0.12.0.1 to 0.12.0.2. - [Release notes](https://github.com/py2exe/py2exe/releases) - [Changelog](https://github.com/py2exe/py2exe/blob/master/ChangeLog) - [Commits](https://github.com/py2exe/py2exe/compare/v0.12.0.1...v0.12.0.2) --- updated-dependencies: - dependency-name: py2exe dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2871023b..a30e63c3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -37,7 +37,7 @@ lxml==4.9.1 # We only need py2exe on windows. # Pre-release version addressing semantic_version 2.9.0+ issues: # -py2exe==0.12.0.1; sys_platform == 'win32' +py2exe==0.12.0.2; sys_platform == 'win32' # Testing pytest==7.2.0 From 0aa74b87557fd59f9ed10d19d661ecdc44f019a1 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sat, 29 Oct 2022 16:01:39 +0100 Subject: [PATCH 25/95] tests: journal: Change `py` LocalPath type to be `_pytest.tmpdir.TempPathFactory` This is more correct, and means we don't need the `py` module, which wasn't in requirements-dev.txt, causing GitHub workflow `pytest` runs to fail. --- tests/journal_lock.py/test_journal_lock.py | 25 +++++++++++----------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/tests/journal_lock.py/test_journal_lock.py b/tests/journal_lock.py/test_journal_lock.py index 016a8f1b..72252e9f 100644 --- a/tests/journal_lock.py/test_journal_lock.py +++ b/tests/journal_lock.py/test_journal_lock.py @@ -8,7 +8,6 @@ import pytest # Import as other names else they get picked up when used as fixtures from _pytest import monkeypatch as _pytest_monkeypatch from _pytest import tmpdir as _pytest_tmpdir -from py._path.local import LocalPath as py_path_local_LocalPath from config import config from journal_lock import JournalLock, JournalLockResult @@ -120,7 +119,7 @@ class TestJournalLock: def mock_journaldir( self, monkeypatch: _pytest_monkeypatch, tmp_path_factory: _pytest_tmpdir.TempPathFactory - ) -> py_path_local_LocalPath: + ) -> _pytest_tmpdir.TempPathFactory: """Fixture for mocking config.get_str('journaldir').""" def get_str(key: str, *, default: str = None) -> str: """Mock config.*Config get_str to provide fake journaldir.""" @@ -139,7 +138,7 @@ class TestJournalLock: self, monkeypatch: _pytest_monkeypatch, tmp_path_factory: _pytest_tmpdir.TempPathFactory - ) -> py_path_local_LocalPath: + ) -> _pytest_tmpdir.TempPathFactory: """Fixture for mocking config.get_str('journaldir').""" def get_str(key: str, *, default: str = None) -> str: """Mock config.*Config get_str to provide fake journaldir.""" @@ -155,7 +154,7 @@ class TestJournalLock: ########################################################################### # Tests against JournalLock.__init__() - def test_journal_lock_init(self, mock_journaldir: py_path_local_LocalPath): + def test_journal_lock_init(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock instantiation.""" print(f'{type(mock_journaldir)=}') tmpdir = str(mock_journaldir.getbasetemp()) @@ -177,7 +176,7 @@ class TestJournalLock: jlock.set_path_from_journaldir() assert jlock.journal_dir_path is None - def test_path_from_journaldir_with_tmpdir(self, mock_journaldir: py_path_local_LocalPath): + def test_path_from_journaldir_with_tmpdir(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.set_path_from_journaldir() with tmpdir.""" tmpdir = mock_journaldir @@ -201,7 +200,7 @@ class TestJournalLock: locked = jlock.obtain_lock() assert locked == JournalLockResult.JOURNALDIR_IS_NONE - def test_obtain_lock_with_tmpdir(self, mock_journaldir: py_path_local_LocalPath): + def test_obtain_lock_with_tmpdir(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.obtain_lock() with tmpdir.""" jlock = JournalLock() @@ 
-214,7 +213,7 @@ class TestJournalLock: assert jlock.release_lock() os.unlink(str(jlock.journal_dir_lockfile_name)) - def test_obtain_lock_with_tmpdir_ro(self, mock_journaldir: py_path_local_LocalPath): + def test_obtain_lock_with_tmpdir_ro(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.obtain_lock() with read-only tmpdir.""" tmpdir = str(mock_journaldir.getbasetemp()) print(f'{tmpdir=}') @@ -281,7 +280,7 @@ class TestJournalLock: assert locked == JournalLockResult.JOURNALDIR_READONLY - def test_obtain_lock_already_locked(self, mock_journaldir: py_path_local_LocalPath): + def test_obtain_lock_already_locked(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.obtain_lock() with tmpdir.""" continue_q: mp.Queue = mp.Queue() exit_q: mp.Queue = mp.Queue() @@ -313,7 +312,7 @@ class TestJournalLock: ########################################################################### # Tests against JournalLock.release_lock() - def test_release_lock(self, mock_journaldir: py_path_local_LocalPath): + def test_release_lock(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.release_lock().""" # First actually obtain the lock, and check it worked jlock = JournalLock() @@ -331,12 +330,12 @@ class TestJournalLock: # Cleanup, to avoid side-effect on other tests os.unlink(str(jlock.journal_dir_lockfile_name)) - def test_release_lock_not_locked(self, mock_journaldir: py_path_local_LocalPath): + def test_release_lock_not_locked(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.release_lock() when not locked.""" jlock = JournalLock() assert jlock.release_lock() - def test_release_lock_lie_locked(self, mock_journaldir: py_path_local_LocalPath): + def test_release_lock_lie_locked(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """Test JournalLock.release_lock() when not locked, but lie we are.""" jlock = JournalLock() jlock.locked = True @@ -346,7 +345,7 @@ class TestJournalLock: # Tests against JournalLock.update_lock() def test_update_lock( self, - mock_journaldir_changing: py_path_local_LocalPath): + mock_journaldir_changing: _pytest_tmpdir.TempPathFactory): """ Test JournalLock.update_lock(). @@ -374,7 +373,7 @@ class TestJournalLock: # And the old_journaldir's lockfile too os.unlink(str(old_journaldir_lockfile_name)) - def test_update_lock_same(self, mock_journaldir: py_path_local_LocalPath): + def test_update_lock_same(self, mock_journaldir: _pytest_tmpdir.TempPathFactory): """ Test JournalLock.update_lock(). From 9a209c33ec475e6fc9e833bf97d281e1dcb1be80 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 29 Oct 2022 15:05:59 +0000 Subject: [PATCH 26/95] build(deps-dev): bump autopep8 from 1.7.0 to 2.0.0 Bumps [autopep8](https://github.com/hhatto/autopep8) from 1.7.0 to 2.0.0. - [Release notes](https://github.com/hhatto/autopep8/releases) - [Commits](https://github.com/hhatto/autopep8/compare/v1.7.0...v2.0.0) --- updated-dependencies: - dependency-name: autopep8 dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2871023b..5acbeced 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -26,7 +26,7 @@ safety==2.3.1 types-requests==2.28.11.2 # Code formatting tools -autopep8==1.7.0 +autopep8==2.0.0 # HTML changelogs grip==4.6.1 From a607d45fa34cd157433363f53793f52976cb6177 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 17:05:11 +0000 Subject: [PATCH 27/95] build(deps-dev): bump flake8-comprehensions from 3.10.0 to 3.10.1 Bumps [flake8-comprehensions](https://github.com/adamchainz/flake8-comprehensions) from 3.10.0 to 3.10.1. - [Release notes](https://github.com/adamchainz/flake8-comprehensions/releases) - [Changelog](https://github.com/adamchainz/flake8-comprehensions/blob/main/HISTORY.rst) - [Commits](https://github.com/adamchainz/flake8-comprehensions/compare/3.10.0...3.10.1) --- updated-dependencies: - dependency-name: flake8-comprehensions dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a3a64d5e..1ff123a8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -11,7 +11,7 @@ setuptools==65.5.0 flake8==5.0.4 flake8-annotations-coverage==0.0.6 flake8-cognitive-complexity==0.1.0 -flake8-comprehensions==3.10.0 +flake8-comprehensions==3.10.1 flake8-docstrings==1.6.0 isort==5.10.1 flake8-isort==5.0.0 From e177e73f67d41d767abe5c5f56024e53f86b3606 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 17:00:46 +0000 Subject: [PATCH 28/95] build(deps-dev): bump setuptools from 65.5.0 to 65.5.1 Bumps [setuptools](https://github.com/pypa/setuptools) from 65.5.0 to 65.5.1. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v65.5.0...v65.5.1) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a3a64d5e..dffaebcb 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ wheel # We can't rely on just picking this up from either the base (not venv), # or venv-init-time version. Specify here so that dependabot will prod us # about new versions. -setuptools==65.5.0 +setuptools==65.5.1 # Static analysis tools flake8==5.0.4 From 11316c8647a833708552b2a39d831011af11fe56 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 17:00:49 +0000 Subject: [PATCH 29/95] build(deps-dev): bump pywin32 from 304 to 305 Bumps [pywin32](https://github.com/mhammond/pywin32) from 304 to 305. - [Release notes](https://github.com/mhammond/pywin32/releases) - [Changelog](https://github.com/mhammond/pywin32/blob/main/CHANGES.txt) - [Commits](https://github.com/mhammond/pywin32/commits) --- updated-dependencies: - dependency-name: pywin32 dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a3a64d5e..dbc080aa 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -45,7 +45,7 @@ pytest-cov==4.0.0 # Pytest code coverage support coverage[toml]==6.5.0 # pytest-cov dep. This is here to ensure that it includes TOML support for pyproject.toml configs coverage-conditional-plugin==0.7.0 # For manipulating folder permissions and the like. -pywin32==304; sys_platform == 'win32' +pywin32==305; sys_platform == 'win32' # All of the normal requirements From 6d298ae62651f9a87dd199a8aa87eec57c1b1409 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 17:01:02 +0000 Subject: [PATCH 30/95] build(deps-dev): bump py2exe from 0.12.0.2 to 0.13.0.0 Bumps [py2exe](https://github.com/py2exe/py2exe) from 0.12.0.2 to 0.13.0.0. - [Release notes](https://github.com/py2exe/py2exe/releases) - [Changelog](https://github.com/py2exe/py2exe/blob/master/ChangeLog) - [Commits](https://github.com/py2exe/py2exe/compare/v0.12.0.2...v0.13.0.0) --- updated-dependencies: - dependency-name: py2exe dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a3a64d5e..1370d925 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -37,7 +37,7 @@ lxml==4.9.1 # We only need py2exe on windows. # Pre-release version addressing semantic_version 2.9.0+ issues: # -py2exe==0.12.0.2; sys_platform == 'win32' +py2exe==0.13.0.0; sys_platform == 'win32' # Testing pytest==7.2.0 From c5a1ee648fbf1f65cec4a46d8777cadaf9a1e29f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Nov 2022 17:01:23 +0000 Subject: [PATCH 31/95] build(deps-dev): bump mypy from 0.982 to 0.991 Bumps [mypy](https://github.com/python/mypy) from 0.982 to 0.991. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v0.982...v0.991) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a3a64d5e..a5a2aa16 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -20,7 +20,7 @@ flake8-noqa==1.2.9 flake8-polyfill==1.0.2 flake8-use-fstring==1.4 -mypy==0.982 +mypy==0.991 pep8-naming==0.13.2 safety==2.3.1 types-requests==2.28.11.2 From 04f49dad4f73e0ddb63a015352ec37698656250e Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 15 Nov 2022 10:22:36 +0000 Subject: [PATCH 32/95] pre-commit: Update flake8 repo URL We're using our local version *anyway*, but update this in case we decide to switch. 
--- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 20a49627..6737328d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,7 @@ repos: # - id: autopep8 ### # flake8 --show-source -### - repo: https://gitlab.com/pycqa/flake8 +### - repo: https://github.com/PyCQA/flake8 ### rev: '' ### hooks: ### - id: flake8 From 8e7724d7cac2723ac51b123c39bf1315fa6865df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Nov 2022 17:02:56 +0000 Subject: [PATCH 33/95] build(deps-dev): bump types-requests from 2.28.11.2 to 2.28.11.5 Bumps [types-requests](https://github.com/python/typeshed) from 2.28.11.2 to 2.28.11.5. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a7480f12..8af1f0d7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -23,7 +23,7 @@ flake8-use-fstring==1.4 mypy==0.991 pep8-naming==0.13.2 safety==2.3.1 -types-requests==2.28.11.2 +types-requests==2.28.11.5 # Code formatting tools autopep8==2.0.0 From 4f0a6ee12d345cbe72a611187592b2cfa56bbc92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:04:28 +0000 Subject: [PATCH 34/95] build(deps-dev): bump setuptools from 65.5.1 to 65.6.0 Bumps [setuptools](https://github.com/pypa/setuptools) from 65.5.1 to 65.6.0. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v65.5.1...v65.6.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a7480f12..56706198 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ wheel # We can't rely on just picking this up from either the base (not venv), # or venv-init-time version. Specify here so that dependabot will prod us # about new versions. -setuptools==65.5.1 +setuptools==65.6.0 # Static analysis tools flake8==5.0.4 From 3ce5418bc28c869b51be6dba11d7a3a677690f10 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:04:33 +0000 Subject: [PATCH 35/95] build(deps-dev): bump flake8-isort from 5.0.0 to 5.0.3 Bumps [flake8-isort](https://github.com/gforcada/flake8-isort) from 5.0.0 to 5.0.3. - [Release notes](https://github.com/gforcada/flake8-isort/releases) - [Changelog](https://github.com/gforcada/flake8-isort/blob/master/CHANGES.rst) - [Commits](https://github.com/gforcada/flake8-isort/compare/5.0.0...5.0.3) --- updated-dependencies: - dependency-name: flake8-isort dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a7480f12..77713d11 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,7 +14,7 @@ flake8-cognitive-complexity==0.1.0 flake8-comprehensions==3.10.1 flake8-docstrings==1.6.0 isort==5.10.1 -flake8-isort==5.0.0 +flake8-isort==5.0.3 flake8-json==21.7.0 flake8-noqa==1.2.9 flake8-polyfill==1.0.2 From 9faae8b9bca0d66f9f3bb2639cf3b84d40d9591a Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 28 Sep 2022 17:08:41 +0100 Subject: [PATCH 36/95] EDDN: Open & create sqlite3 db for replay * sqlite3 open, and creation of table. * Change `load_journal_replay()` to `load_journal_replay_file()` and change the semantics to just return the `list[str]` loaded from it. It also now catches no exceptions. * Remove the "lock the journal cache" on init as it's not necessary. There's still a lot more changes to come on this. --- plugins/eddn.py | 98 ++++++++++++++++++++++++++++++------------------- 1 file changed, 60 insertions(+), 38 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index cd1d53ff..50cb402c 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -26,16 +26,16 @@ import itertools import json import pathlib import re +import sqlite3 import sys import tkinter as tk from collections import OrderedDict from os import SEEK_SET -from os.path import join from platform import system from textwrap import dedent from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Mapping, MutableMapping, Optional from typing import OrderedDict as OrderedDictT -from typing import TextIO, Tuple, Union +from typing import Tuple, Union import requests @@ -52,10 +52,6 @@ from prefs import prefsVersion from ttkHyperlinkLabel import HyperlinkLabel from util import text -if sys.platform != 'win32': - from fcntl import LOCK_EX, LOCK_NB, lockf - - if TYPE_CHECKING: def _(x: str) -> str: return x @@ -155,8 +151,25 @@ class EDDN: self.parent: tk.Tk = parent self.session = requests.Session() self.session.headers['User-Agent'] = user_agent - self.replayfile: Optional[TextIO] = None # For delayed messages - self.replaylog: List[str] = [] + + ####################################################################### + # EDDN delayed sending/retry + ####################################################################### + self.replaydb = self.journal_replay_sqlite_init() + # Kept only for converting legacy file to sqlite3 + try: + replaylog = self.load_journal_replay_file() + + except FileNotFoundError: + pass + + finally: + # TODO: Convert `replaylog` into the database. + # Remove the file. + ... + + ####################################################################### + self.fss_signals: List[Mapping[str, Any]] = [] if config.eddn_url is not None: @@ -165,36 +178,49 @@ class EDDN: else: self.eddn_url = self.DEFAULT_URL - def load_journal_replay(self) -> bool: + def journal_replay_sqlite_init(self) -> sqlite3.Cursor: + """ + Ensure the sqlite3 database for EDDN replays exists and has schema. + + :return: sqlite3 cursor for the database. 
+ """ + self.replaydb_conn = sqlite3.connect(config.app_dir_path / 'eddn_replay.db') + replaydb = self.replaydb_conn.cursor() + try: + replaydb.execute( + """ + CREATE TABLE messages + ( + id INT PRIMARY KEY NOT NULL, + created TEXT NOT NULL, + cmdr TEXT NOT NULL, + edmc_version TEXT, + game_version TEXT, + game_build TEXT, + message TEXT NOT NULL + ) + """ + ) + + except sqlite3.OperationalError as e: + if str(e) != "table messages already exists": + raise e + + return replaydb + + def load_journal_replay_file(self) -> list[str]: """ Load cached journal entries from disk. - :return: a bool indicating success + Simply let any exceptions propagate up if there's an error. + + :return: Contents of the file as a list. """ # Try to obtain exclusive access to the journal cache - filename = join(config.app_dir, 'replay.jsonl') - try: - try: - # Try to open existing file - self.replayfile = open(filename, 'r+', buffering=1) - - except FileNotFoundError: - self.replayfile = open(filename, 'w+', buffering=1) # Create file - - if sys.platform != 'win32': # open for writing is automatically exclusive on Windows - lockf(self.replayfile, LOCK_EX | LOCK_NB) - - except OSError: - logger.exception('Failed opening "replay.jsonl"') - if self.replayfile: - self.replayfile.close() - - self.replayfile = None - return False - - else: - self.replaylog = [line.strip() for line in self.replayfile] - return True + filename = config.app_dir_path / 'replay.jsonl' + # Try to open existing file + with open(filename, 'r+', buffering=1) as replay_file: + return [line.strip() for line in replay_file] def flush(self): """Flush the replay file, clearing any data currently there that is not in the replaylog list.""" @@ -762,7 +788,7 @@ class EDDN: :param entry: The full journal event dictionary (due to checks in this function). :param msg: The EDDN message body to be sent. """ - if self.replayfile or self.load_journal_replay(): + if self.replayfile or self.load_journal_replay_file(): # Store the entry self.replaylog.append(json.dumps([cmdr, msg])) self.replayfile.write(f'{self.replaylog[-1]}\n') # type: ignore @@ -1621,10 +1647,6 @@ def plugin_app(parent: tk.Tk) -> Optional[tk.Frame]: """ this.parent = parent this.eddn = EDDN(parent) - # Try to obtain exclusive lock on journal cache, even if we don't need it yet - if not this.eddn.load_journal_replay(): - # Shouldn't happen - don't bother localizing - this.parent.children['status']['text'] = 'Error: Is another copy of this app already running?' if config.eddn_tracking_ui: this.ui = tk.Frame(parent) From 072eadd89373de6117a8b348b3d26f04edec37f0 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 28 Sep 2022 17:17:18 +0100 Subject: [PATCH 37/95] EDDN: messages.id AUTOINCREMENT, and index created & cmdr We'll definitely want to query against `cmdr`, and possibly `created`. We shouldn't need to against other fields, they'll just be checked during processing of an already selected message. 
--- plugins/eddn.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 50cb402c..eee9e8d1 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -191,7 +191,7 @@ class EDDN: """ CREATE TABLE messages ( - id INT PRIMARY KEY NOT NULL, + id INTEGER PRIMARY KEY AUTOINCREMENT, created TEXT NOT NULL, cmdr TEXT NOT NULL, edmc_version TEXT, @@ -202,6 +202,24 @@ class EDDN: """ ) + replaydb.execute( + """ + CREATE INDEX messages_created ON messages + ( + created + ) + """ + ) + + replaydb.execute( + """ + CREATE INDEX messages_cmdr ON messages + ( + cmdr + ) + """ + ) + except sqlite3.OperationalError as e: if str(e) != "table messages already exists": raise e From 03e432034f3a1eb6f56250ee825f213169fc3070 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 28 Sep 2022 17:39:36 +0100 Subject: [PATCH 38/95] EDDN: Moving replay functionality into its own class --- plugins/eddn.py | 161 +++++++++++++++++++++++++----------------------- 1 file changed, 83 insertions(+), 78 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index eee9e8d1..b511be16 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -114,7 +114,6 @@ class This: this = This() - # This SKU is tagged on any module or ship that you must have Horizons for. HORIZONS_SKU = 'ELITE_HORIZONS_V_PLANETARY_LANDINGS' # ELITE_HORIZONS_V_COBRA_MK_IV_1000` is for the Cobra Mk IV, but @@ -126,8 +125,84 @@ HORIZONS_SKU = 'ELITE_HORIZONS_V_PLANETARY_LANDINGS' # one. -# TODO: a good few of these methods are static or could be classmethods. they should be created as such. +class EDDNReplay: + """Store and retry sending of EDDN messages.""" + SQLITE_DB_FILENAME = 'eddn_replay.db' + + def __init__(self) -> None: + """ + Prepare the system for processing messages. + + - Ensure the sqlite3 database for EDDN replays exists and has schema. + - Convert any legacy file into the database. + """ + self.db_conn = sqlite3.connect(config.app_dir_path / self.SQLITE_DB_FILENAME) + self.db = self.db_conn.cursor() + + try: + self.db.execute( + """ + CREATE TABLE messages + ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + created TEXT NOT NULL, + cmdr TEXT NOT NULL, + edmc_version TEXT, + game_version TEXT, + game_build TEXT, + message TEXT NOT NULL + ) + """ + ) + + self.db.execute( + """ + CREATE INDEX messages_created ON messages + ( + created + ) + """ + ) + + self.db.execute( + """ + CREATE INDEX messages_cmdr ON messages + ( + cmdr + ) + """ + ) + + except sqlite3.OperationalError as e: + if str(e) != "table messages already exists": + raise e + + self.convert_legacy_file() + + def convert_legacy_file(self): + """Convert a legacy file's contents into the sqlite3 db.""" + try: + for m in self.load_legacy_file(): + ... + + except FileNotFoundError: + pass + + def load_legacy_file(self) -> list[str]: + """ + Load cached journal entries from disk. + + :return: Contents of the file as a list. + """ + # Try to obtain exclusive access to the journal cache + filename = config.app_dir_path / 'replay.jsonl' + # Try to open existing file + with open(filename, 'r+', buffering=1) as replay_file: + return [line.strip() for line in replay_file] + + +# TODO: a good few of these methods are static or could be classmethods. they should be created as such. 
class EDDN: """EDDN Data export.""" @@ -155,19 +230,7 @@ class EDDN: ####################################################################### # EDDN delayed sending/retry ####################################################################### - self.replaydb = self.journal_replay_sqlite_init() - # Kept only for converting legacy file to sqlite3 - try: - replaylog = self.load_journal_replay_file() - - except FileNotFoundError: - pass - - finally: - # TODO: Convert `replaylog` into the database. - # Remove the file. - ... - + self.replay = EDDNReplay() ####################################################################### self.fss_signals: List[Mapping[str, Any]] = [] @@ -178,68 +241,6 @@ class EDDN: else: self.eddn_url = self.DEFAULT_URL - def journal_replay_sqlite_init(self) -> sqlite3.Cursor: - """ - Ensure the sqlite3 database for EDDN replays exists and has schema. - - :return: sqlite3 cursor for the database. - """ - self.replaydb_conn = sqlite3.connect(config.app_dir_path / 'eddn_replay.db') - replaydb = self.replaydb_conn.cursor() - try: - replaydb.execute( - """ - CREATE TABLE messages - ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - created TEXT NOT NULL, - cmdr TEXT NOT NULL, - edmc_version TEXT, - game_version TEXT, - game_build TEXT, - message TEXT NOT NULL - ) - """ - ) - - replaydb.execute( - """ - CREATE INDEX messages_created ON messages - ( - created - ) - """ - ) - - replaydb.execute( - """ - CREATE INDEX messages_cmdr ON messages - ( - cmdr - ) - """ - ) - - except sqlite3.OperationalError as e: - if str(e) != "table messages already exists": - raise e - - return replaydb - - def load_journal_replay_file(self) -> list[str]: - """ - Load cached journal entries from disk. - - Simply let any exceptions propagate up if there's an error. - - :return: Contents of the file as a list. - """ - # Try to obtain exclusive access to the journal cache - filename = config.app_dir_path / 'replay.jsonl' - # Try to open existing file - with open(filename, 'r+', buffering=1) as replay_file: - return [line.strip() for line in replay_file] - def flush(self): """Flush the replay file, clearing any data currently there that is not in the replaylog list.""" if self.replayfile is None: @@ -361,6 +362,10 @@ class EDDN: def sendreplay(self) -> None: # noqa: CCR001 """Send cached Journal lines to EDDN.""" + # TODO: Convert to using the sqlite3 db + # **IF** this is moved to a thread worker then we need to ensure + # that we're operating sqlite3 in a thread-safe manner, + # Ref: if not self.replayfile: return # Probably closing app @@ -806,7 +811,7 @@ class EDDN: :param entry: The full journal event dictionary (due to checks in this function). :param msg: The EDDN message body to be sent. """ - if self.replayfile or self.load_journal_replay_file(): + if self.replayfile or self.journal_replay_load_file(): # Store the entry self.replaylog.append(json.dumps([cmdr, msg])) self.replayfile.write(f'{self.replaylog[-1]}\n') # type: ignore From 424d5f023c6b6c6fd404bf006e2a491961d3f58f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 28 Sep 2022 18:11:12 +0100 Subject: [PATCH 39/95] EDDNReplay.add_message() is now functional And that includes the code to handle legacy `replay.json` messages. 
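For context on what 'legacy' means here: each line of the old `replay.jsonl` held a JSON array of `[cmdr, msg]`, and `msg` generally carried no EDDN `header` block (the old code only built one at send time), so a header has to be synthesised before the message can be stored in full, as the diff below does. A rough sketch with an invented sample line:

import json

# Invented example of one legacy replay.jsonl line: a JSON array of [cmdr, msg].
line = (
    '["Example Cmdr", {"$schemaRef": "https://eddn.edcd.io/schemas/journal/1", '
    '"message": {"timestamp": "2022-09-28T17:00:00Z", "event": "FSDJump"}}]'
)

cmdr, msg = json.loads(line)
if 'header' not in msg:
    # Synthesise a placeholder header, roughly as the diff below does; these
    # values are illustrative stand-ins, not the real ones.
    msg['header'] = {
        'softwareName': 'E:D Market Connector (legacy replay)',
        'softwareVersion': 'x.y.z',
        'uploaderID': cmdr,
        'gameversion': '',  # can't add what we don't know
        'gamebuild': '',    # can't add what we don't know
    }

print(msg['header']['uploaderID'])  # Example Cmdr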
---
 plugins/eddn.py | 68 +++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 54 insertions(+), 14 deletions(-)

diff --git a/plugins/eddn.py b/plugins/eddn.py
index b511be16..9f0d7e61 100644
--- a/plugins/eddn.py
+++ b/plugins/eddn.py
@@ -180,27 +180,67 @@ class EDDNReplay:
         self.convert_legacy_file()
 
+    def add_message(self, cmdr, msg):
+        """
+        Add an EDDN message to the database.
+
+        `msg` absolutely needs to be the **FULL** EDDN message, including all
+        of `header`, `$schemaRef` and `message`. Code handling this not being
+        the case is only for loading the legacy `replay.json` file messages.
+
+        :param cmdr: Name of the Commander that created this message.
+        :param msg: The full, transmission-ready, EDDN message.
+        """
+        # Cater for legacy replay.json messages
+        if 'header' not in msg:
+            msg['header'] = {
+                # We have to lie and say it's *this* version, but denote that
+                # it might not actually be this version.
+                'softwareName': f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]'
+                                ' (legacy replay)',
+                'softwareVersion': str(appversion_nobuild()),
+                'uploaderID': cmdr,
+                'gameversion': '',  # Can't add what we don't know
+                'gamebuild': '',  # Can't add what we don't know
+            }
+
+        created = msg['message']['timestamp']
+        edmc_version = msg['header']['softwareVersion']
+        game_version = msg['header']['gameversion']
+        game_build = msg['header']['gamebuild']
+        uploader = msg['header']['uploaderID']
+
+        try:
+            self.db.execute(
+                """
+                INSERT INTO messages (
+                    created, cmdr, edmc_version, game_version, game_build, message
+                )
+                VALUES (
+                    ?, ?, ?, ?, ?, ?
+                )
+                """,
+                (created, uploader, edmc_version, game_version, game_build, json.dumps(msg))
+            )
+            self.db_conn.commit()
+
+        except Exception:
+            logger.exception('EDDNReplay INSERT error')
+
     def convert_legacy_file(self):
         """Convert a legacy file's contents into the sqlite3 db."""
         try:
-            for m in self.load_legacy_file():
-                ...
+            filename = config.app_dir_path / 'replay.jsonl'
+            with open(filename, 'r+', buffering=1) as replay_file:
+                for line in replay_file:
+                    j = json.loads(line)
+                    cmdr, msg = j
+                    self.add_message(cmdr, msg)
+                    break
 
         except FileNotFoundError:
             pass
 
-    def load_legacy_file(self) -> list[str]:
-        """
-        Load cached journal entries from disk.
-
-        :return: Contents of the file as a list.
-        """
-        # Try to obtain exclusive access to the journal cache
-        filename = config.app_dir_path / 'replay.jsonl'
-        # Try to open existing file
-        with open(filename, 'r+', buffering=1) as replay_file:
-            return [line.strip() for line in replay_file]
-
 # TODO: a good few of these methods are static or could be classmethods. they should be created as such.
 class EDDN:

From 9a660b3b49de0a06458d8ced46d28bf30b94dcd3 Mon Sep 17 00:00:00 2001
From: Athanasius
Date: Thu, 29 Sep 2022 16:06:57 +0100
Subject: [PATCH 40/95] EDDN: New class is EDDNSender now, and version the queue database file

* It makes more sense for this new class to be concerned with all the
  'send it' functionality, not just 'replay', so rename it.
* Although we're trying to get the schema right *first* time, let's plan
  ahead and version the filename in case we need migrations in the future.
---
 plugins/eddn.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/plugins/eddn.py b/plugins/eddn.py
index 9f0d7e61..9a0a3324 100644
--- a/plugins/eddn.py
+++ b/plugins/eddn.py
@@ -125,10 +125,10 @@ HORIZONS_SKU = 'ELITE_HORIZONS_V_PLANETARY_LANDINGS'
 # one.
-class EDDNReplay: +class EDDNSender: """Store and retry sending of EDDN messages.""" - SQLITE_DB_FILENAME = 'eddn_replay.db' + SQLITE_DB_FILENAME = 'eddn_queue-v1.db' def __init__(self) -> None: """ @@ -188,6 +188,8 @@ class EDDNReplay: of `header`, `$schemaRef` and `message`. Code handling this not being the case is only for loading the legacy `replay.json` file messages. + TODO: Return the unique row id of the added message. + :param cmdr: Name of the Commander that created this message. :param msg: The full, transmission-ready, EDDN message. """ @@ -267,11 +269,7 @@ class EDDN: self.session = requests.Session() self.session.headers['User-Agent'] = user_agent - ####################################################################### - # EDDN delayed sending/retry - ####################################################################### - self.replay = EDDNReplay() - ####################################################################### + self.sender = EDDNSender() self.fss_signals: List[Mapping[str, Any]] = [] From 80129361fe0c569d7093f0c7fd3137fe36c271fe Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 16:30:09 +0100 Subject: [PATCH 41/95] EDDN: Refactor queue db open/creation to own function --- plugins/eddn.py | 45 +++++++++++++++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 9a0a3324..dfda017e 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -126,22 +126,37 @@ HORIZONS_SKU = 'ELITE_HORIZONS_V_PLANETARY_LANDINGS' class EDDNSender: - """Store and retry sending of EDDN messages.""" + """Handle sending of EDDN messages to the Gateway.""" - SQLITE_DB_FILENAME = 'eddn_queue-v1.db' + SQLITE_DB_FILENAME_V1 = 'eddn_queue-v1.db' - def __init__(self) -> None: + def __init__(self, eddn_endpoint) -> None: """ Prepare the system for processing messages. - Ensure the sqlite3 database for EDDN replays exists and has schema. - Convert any legacy file into the database. """ - self.db_conn = sqlite3.connect(config.app_dir_path / self.SQLITE_DB_FILENAME) + self.db_conn = self.sqlite_queue_v1() self.db = self.db_conn.cursor() + ####################################################################### + # Queue database migration + ####################################################################### + self.convert_legacy_file() + ####################################################################### + + def sqlite_queue_v1(self): + """ + Initialise a v1 EDDN queue database. 
+ + :return: sqlite3 connection + """ + db_conn = sqlite3.connect(config.app_dir_path / self.SQLITE_DB_FILENAME_V1) + db = db_conn.cursor() + try: - self.db.execute( + db.execute( """ CREATE TABLE messages ( @@ -156,7 +171,7 @@ class EDDNSender: """ ) - self.db.execute( + db.execute( """ CREATE INDEX messages_created ON messages ( @@ -165,7 +180,7 @@ class EDDNSender: """ ) - self.db.execute( + db.execute( """ CREATE INDEX messages_cmdr ON messages ( @@ -176,9 +191,15 @@ class EDDNSender: except sqlite3.OperationalError as e: if str(e) != "table messages already exists": + # Cleanup, as schema creation failed + db.close() + db_conn.close() raise e - self.convert_legacy_file() + # We return only the connection, so tidy up + db.close() + + return db_conn def add_message(self, cmdr, msg): """ @@ -269,16 +290,16 @@ class EDDN: self.session = requests.Session() self.session.headers['User-Agent'] = user_agent - self.sender = EDDNSender() - - self.fss_signals: List[Mapping[str, Any]] = [] - if config.eddn_url is not None: self.eddn_url = config.eddn_url else: self.eddn_url = self.DEFAULT_URL + self.sender = EDDNSender(self.eddn_url) + + self.fss_signals: List[Mapping[str, Any]] = [] + def flush(self): """Flush the replay file, clearing any data currently there that is not in the replaylog list.""" if self.replayfile is None: From 089c33002c560cd16543679c9f304c07e5b3ac8b Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 16:34:43 +0100 Subject: [PATCH 42/95] EDDNSender->add_message() returns ID of INSERTed row --- plugins/eddn.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index dfda017e..b71233c6 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -201,7 +201,7 @@ class EDDNSender: return db_conn - def add_message(self, cmdr, msg): + def add_message(self, cmdr, msg) -> int: """ Add an EDDN message to the database. @@ -209,10 +209,12 @@ class EDDNSender: of `header`, `$schemaRef` and `message`. Code handling this not being the case is only for loading the legacy `replay.json` file messages. - TODO: Return the unique row id of the added message. + NB: Although `cmdr` *should* be the same as `msg->header->uploaderID` + we choose not to assume that. :param cmdr: Name of the Commander that created this message. :param msg: The full, transmission-ready, EDDN message. + :return: ID of the successfully inserted row. """ # Cater for legacy replay.json messages if 'header' not in msg: @@ -250,6 +252,8 @@ class EDDNSender: except Exception: logger.exception('EDDNReplay INSERT error') + return self.db.lastrowid + def convert_legacy_file(self): """Convert a legacy file's contents into the sqlite3 db.""" try: From 86ff787aed0723c39278e277ee6fd449ab7c273d Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 16:39:30 +0100 Subject: [PATCH 43/95] EDDNSender: Convert all of a legacy file * I had a `break` in there to only convert the first message. * Also collapsed the assignment to `cmdr, msg` to not go via `j`. 
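The 'collapsed assignment' mentioned above is plain tuple unpacking; a minimal before/after sketch with an invented input line:

import json

line = '["Example Cmdr", {"message": {}}]'  # invented sample line

# Before: parse, then unpack via an intermediate name.
j = json.loads(line)
cmdr, msg = j

# After: unpack straight from the parsed two-element list.
cmdr, msg = json.loads(line)
print(cmdr)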
--- plugins/eddn.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index b71233c6..9a08bdb2 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -260,10 +260,8 @@ class EDDNSender: filename = config.app_dir_path / 'replay.jsonl' with open(filename, 'r+', buffering=1) as replay_file: for line in replay_file: - j = json.loads(line) - cmdr, msg = j + cmdr, msg = json.loads(line) self.add_message(cmdr, msg) - break except FileNotFoundError: pass From 09f646a249c7f2449df93f10af1d95d416f2b3ca Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 16:59:34 +0100 Subject: [PATCH 44/95] EDDNSender: Add delete_message() method This was tested by temporary code in `convert_legacy_file()` to delete the last added row once all done. --- plugins/eddn.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/plugins/eddn.py b/plugins/eddn.py index 9a08bdb2..2266d4ad 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -254,6 +254,20 @@ class EDDNSender: return self.db.lastrowid + def delete_message(self, row_id: int) -> None: + """ + Delete a queued message by row id. + + :param row_id: + """ + self.db.execute( + """ + DELETE FROM messages WHERE id = :row_id + """, + {'row_id': row_id} + ) + self.db_conn.commit() + def convert_legacy_file(self): """Convert a legacy file's contents into the sqlite3 db.""" try: From c1793ad8399f45b5e99ce7303e23e51ae1e7b310 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 17:01:50 +0100 Subject: [PATCH 45/95] EDDN: Remove EDDN->flush() --- plugins/eddn.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 2266d4ad..0dc8b101 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -316,19 +316,6 @@ class EDDN: self.fss_signals: List[Mapping[str, Any]] = [] - def flush(self): - """Flush the replay file, clearing any data currently there that is not in the replaylog list.""" - if self.replayfile is None: - logger.error('replayfile is None!') - return - - self.replayfile.seek(0, SEEK_SET) - self.replayfile.truncate() - for line in self.replaylog: - self.replayfile.write(f'{line}\n') - - self.replayfile.flush() - def close(self): """Close down the EDDN class instance.""" logger.debug('Closing replayfile...') From 51fb90b999ea0a2f7bdaa08c14ce27841470c898 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 17:04:34 +0100 Subject: [PATCH 46/95] EDDN: Change EDDN.close() to call into EDDNSender.close() --- plugins/eddn.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 0dc8b101..ac71bf8a 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -201,6 +201,14 @@ class EDDNSender: return db_conn + def close(self) -> None: + """Clean up any resources.""" + if self.db: + self.db.close() + + if self.db_conn: + self.db_conn.close() + def add_message(self, cmdr, msg) -> int: """ Add an EDDN message to the database. 
@@ -318,11 +326,10 @@ class EDDN: def close(self): """Close down the EDDN class instance.""" - logger.debug('Closing replayfile...') - if self.replayfile: - self.replayfile.close() + logger.debug('Closing Sender...') + if self.sender: + self.sender.close() - self.replayfile = None logger.debug('Done.') logger.debug('Closing EDDN requests.Session.') From f3017d40ec1c400fdcf62f0a11189a8f25899a43 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 17:11:49 +0100 Subject: [PATCH 47/95] EDDNSender: Fill out type hints --- plugins/eddn.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index ac71bf8a..a5755b48 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -30,7 +30,6 @@ import sqlite3 import sys import tkinter as tk from collections import OrderedDict -from os import SEEK_SET from platform import system from textwrap import dedent from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Mapping, MutableMapping, Optional @@ -130,13 +129,17 @@ class EDDNSender: SQLITE_DB_FILENAME_V1 = 'eddn_queue-v1.db' - def __init__(self, eddn_endpoint) -> None: + def __init__(self, eddn_endpoint: str) -> None: """ Prepare the system for processing messages. - Ensure the sqlite3 database for EDDN replays exists and has schema. - Convert any legacy file into the database. + + :param eddn_endpoint: Where messages should be sent. """ + self.eddn_endpoint = eddn_endpoint + self.db_conn = self.sqlite_queue_v1() self.db = self.db_conn.cursor() @@ -146,7 +149,7 @@ class EDDNSender: self.convert_legacy_file() ####################################################################### - def sqlite_queue_v1(self): + def sqlite_queue_v1(self) -> sqlite3.Connection: """ Initialise a v1 EDDN queue database. @@ -209,7 +212,7 @@ class EDDNSender: if self.db_conn: self.db_conn.close() - def add_message(self, cmdr, msg) -> int: + def add_message(self, cmdr: str, msg: dict) -> int: """ Add an EDDN message to the database. @@ -266,7 +269,7 @@ class EDDNSender: """ Delete a queued message by row id. - :param row_id: + :param row_id: id of message to be deleted. """ self.db.execute( """ From 0e20f4bc00ed7f71629b0b47c35f1732fc64a96a Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 29 Sep 2022 17:18:39 +0100 Subject: [PATCH 48/95] EDDNSender: Remove legacy file after migration --- plugins/eddn.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index a5755b48..f09a4544 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -24,6 +24,7 @@ # ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# import itertools import json +import os import pathlib import re import sqlite3 @@ -281,8 +282,8 @@ class EDDNSender: def convert_legacy_file(self): """Convert a legacy file's contents into the sqlite3 db.""" + filename = config.app_dir_path / 'replay.jsonl' try: - filename = config.app_dir_path / 'replay.jsonl' with open(filename, 'r+', buffering=1) as replay_file: for line in replay_file: cmdr, msg = json.loads(line) @@ -291,6 +292,13 @@ class EDDNSender: except FileNotFoundError: pass + finally: + # Best effort at removing the file/contents + # NB: The legacy code assumed it could write to the file. + replay_file = open(filename, 'w') # Will truncate + replay_file.close() + os.unlink(filename) + # TODO: a good few of these methods are static or could be classmethods. they should be created as such. 
class EDDN: From 2b957d140cc66aba6a2487a7bb7e28cf58a5923d Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 30 Sep 2022 13:35:50 +0100 Subject: [PATCH 49/95] EDDNSender: `convert_legacy_file()` belongs with "open the database" --- plugins/eddn.py | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index f09a4544..505039c4 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -136,6 +136,7 @@ class EDDNSender: - Ensure the sqlite3 database for EDDN replays exists and has schema. - Convert any legacy file into the database. + - (Future) Handle any database migrations. :param eddn_endpoint: Where messages should be sent. """ @@ -205,6 +206,25 @@ class EDDNSender: return db_conn + def convert_legacy_file(self): + """Convert a legacy file's contents into the sqlite3 db.""" + filename = config.app_dir_path / 'replay.jsonl' + try: + with open(filename, 'r+', buffering=1) as replay_file: + for line in replay_file: + cmdr, msg = json.loads(line) + self.add_message(cmdr, msg) + + except FileNotFoundError: + pass + + finally: + # Best effort at removing the file/contents + # NB: The legacy code assumed it could write to the file. + replay_file = open(filename, 'w') # Will truncate + replay_file.close() + os.unlink(filename) + def close(self) -> None: """Clean up any resources.""" if self.db: @@ -280,25 +300,6 @@ class EDDNSender: ) self.db_conn.commit() - def convert_legacy_file(self): - """Convert a legacy file's contents into the sqlite3 db.""" - filename = config.app_dir_path / 'replay.jsonl' - try: - with open(filename, 'r+', buffering=1) as replay_file: - for line in replay_file: - cmdr, msg = json.loads(line) - self.add_message(cmdr, msg) - - except FileNotFoundError: - pass - - finally: - # Best effort at removing the file/contents - # NB: The legacy code assumed it could write to the file. - replay_file = open(filename, 'w') # Will truncate - replay_file.close() - os.unlink(filename) - # TODO: a good few of these methods are static or could be classmethods. they should be created as such. class EDDN: From f66a98464ee03a9f8a6b3c2e26562ca9beb8b463 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 30 Sep 2022 16:10:25 +0100 Subject: [PATCH 50/95] EDDNSender: Closer to actually sending messages now --- plugins/eddn.py | 357 +++++++++++++++++++++++++----------------------- 1 file changed, 183 insertions(+), 174 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 505039c4..22152f83 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -22,6 +22,7 @@ # # ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# # ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# ! $# +import http import itertools import json import os @@ -129,8 +130,14 @@ class EDDNSender: """Handle sending of EDDN messages to the Gateway.""" SQLITE_DB_FILENAME_V1 = 'eddn_queue-v1.db' + TIMEOUT = 10 # requests timeout + UNKNOWN_SCHEMA_RE = re.compile( + r"^FAIL: \[JsonValidationException\('Schema " + r"https://eddn.edcd.io/schemas/(?P.+)/(?P[0-9]+) is unknown, " + r"unable to validate.',\)\]$" + ) - def __init__(self, eddn_endpoint: str) -> None: + def __init__(self, eddn: 'EDDN', eddn_endpoint: str) -> None: """ Prepare the system for processing messages. @@ -138,9 +145,13 @@ class EDDNSender: - Convert any legacy file into the database. - (Future) Handle any database migrations. + :param eddn: Reference to the `EDDN` instance this is for. 
:param eddn_endpoint: Where messages should be sent. """ + self.eddn = eddn self.eddn_endpoint = eddn_endpoint + self.session = requests.Session() + self.session.headers['User-Agent'] = user_agent self.db_conn = self.sqlite_queue_v1() self.db = self.db_conn.cursor() @@ -224,7 +235,7 @@ class EDDNSender: replay_file = open(filename, 'w') # Will truncate replay_file.close() os.unlink(filename) - + def close(self) -> None: """Clean up any resources.""" if self.db: @@ -263,8 +274,8 @@ class EDDNSender: created = msg['message']['timestamp'] edmc_version = msg['header']['softwareVersion'] - game_version = msg['header']['gameversion'] - game_build = msg['header']['gamebuild'] + game_version = msg['header'].get('gameversion', '') + game_build = msg['header'].get('gamebuild', '') uploader = msg['header']['uploaderID'] try: @@ -300,121 +311,103 @@ class EDDNSender: ) self.db_conn.commit() - -# TODO: a good few of these methods are static or could be classmethods. they should be created as such. -class EDDN: - """EDDN Data export.""" - - DEFAULT_URL = 'https://eddn.edcd.io:4430/upload/' - if 'eddn' in debug_senders: - DEFAULT_URL = f'http://{edmc_data.DEBUG_WEBSERVER_HOST}:{edmc_data.DEBUG_WEBSERVER_PORT}/eddn' - - REPLAYPERIOD = 400 # Roughly two messages per second, accounting for send delays [ms] - REPLAYFLUSH = 20 # Update log on disk roughly every 10 seconds - TIMEOUT = 10 # requests timeout - MODULE_RE = re.compile(r'^Hpt_|^Int_|Armour_', re.IGNORECASE) - CANONICALISE_RE = re.compile(r'\$(.+)_name;') - UNKNOWN_SCHEMA_RE = re.compile( - r"^FAIL: \[JsonValidationException\('Schema " - r"https://eddn.edcd.io/schemas/(?P.+)/(?P[0-9]+) is unknown, " - r"unable to validate.',\)\]$" - ) - CAPI_LOCALISATION_RE = re.compile(r'^loc[A-Z].+') - - def __init__(self, parent: tk.Tk): - self.parent: tk.Tk = parent - self.session = requests.Session() - self.session.headers['User-Agent'] = user_agent - - if config.eddn_url is not None: - self.eddn_url = config.eddn_url - - else: - self.eddn_url = self.DEFAULT_URL - - self.sender = EDDNSender(self.eddn_url) - - self.fss_signals: List[Mapping[str, Any]] = [] - - def close(self): - """Close down the EDDN class instance.""" - logger.debug('Closing Sender...') - if self.sender: - self.sender.close() - - logger.debug('Done.') - - logger.debug('Closing EDDN requests.Session.') - self.session.close() - - def send(self, cmdr: str, msg: Mapping[str, Any]) -> None: + def send_message_by_id(self, id: int): """ - Send sends an update to EDDN. + Transmit the message identified by the given ID. - :param cmdr: the CMDR to use as the uploader ID. - :param msg: the payload to send. + :param id: + :return: """ - should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', msg) + self.db.execute( + """ + SELECT * FROM messages WHERE id = :row_id + """, + {'row_id': id} + ) + row = dict(zip([c[0] for c in self.db.description], self.db.fetchone())) + + try: + self.send_message(row['message']) + + except requests.exceptions.HTTPError as e: + logger.warning(f"HTTPError: {str(e)}") + + finally: + # Remove from queue + ... + + + def send_message(self, msg: str) -> bool: + """ + Transmit a fully-formed EDDN message to the Gateway. + + Should catch and handle all failure conditions. A `True` return might + mean that the message was successfully sent, *or* that this message + should not be retried after a failure, i.e. too large. + + :param msg: Fully formed, string, message. 
+ :return: `True` for "now remove this message from the queue" + """ + should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', json.loads(msg)) if should_return: logger.warning('eddn.send has been disabled via killswitch. Returning.') return - msg = new_data + msg = json.dumps(new_data) - uploader_id = cmdr - - to_send: OrderedDictT[str, OrderedDict[str, Any]] = OrderedDict([ - ('$schemaRef', msg['$schemaRef']), - ('header', OrderedDict([ - ('softwareName', f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]'), - ('softwareVersion', str(appversion_nobuild())), - ('uploaderID', uploader_id), - ])), - ('message', msg['message']), - ]) - - # About the smallest request is going to be (newlines added for brevity): - # {"$schemaRef":"https://eddn.edcd.io/schemas/commodity/3","header":{"softwareName":"E:D Market - # Connector Windows","softwareVersion":"5.3.0-beta4extra","uploaderID":"abcdefghijklm"},"messag - # e":{"systemName":"delphi","stationName":"The Oracle","marketId":128782803,"timestamp":"2022-0 - # 1-26T12:00:00Z","commodities":[]}} - # - # Which comes to 315 bytes (including \n) and compresses to 244 bytes. So lets just compress everything - - encoded, compressed = text.gzip(json.dumps(to_send, separators=(',', ':')), max_size=0) + status: tk.Widget = self.eddn.parent.children['status'] + # Even the smallest possible message compresses somewhat, so always compress + encoded, compressed = text.gzip(json.dumps(msg, separators=(',', ':')), max_size=0) headers: None | dict[str, str] = None if compressed: headers = {'Content-Encoding': 'gzip'} - r = self.session.post(self.eddn_url, data=encoded, timeout=self.TIMEOUT, headers=headers) - if r.status_code != requests.codes.ok: + try: + r = self.session.post(self.eddn_endpoint, data=encoded, timeout=self.TIMEOUT, headers=headers) + if r.status_code == requests.codes.ok: + return True - # Check if EDDN is still objecting to an empty commodities list - if ( - r.status_code == 400 - and msg['$schemaRef'] == 'https://eddn.edcd.io/schemas/commodity/3' - and msg['message']['commodities'] == [] - and r.text == "FAIL: []" - ): - logger.trace_if('plugin.eddn', "EDDN is still objecting to empty commodities data") - return # We want to silence warnings otherwise - - if r.status_code == 413: + if r.status_code == http.HTTPStatus.REQUEST_ENTITY_TOO_LARGE: extra_data = { - 'schema_ref': msg.get('$schemaRef', 'Unset $schemaRef!'), + 'schema_ref': new_data.get('$schemaRef', 'Unset $schemaRef!'), 'sent_data_len': str(len(encoded)), } if '/journal/' in extra_data['schema_ref']: - extra_data['event'] = msg.get('message', {}).get('event', 'No Event Set') + extra_data['event'] = new_data.get('message', {}).get('event', 'No Event Set') - self._log_response(r, header_msg='Got a 413 while POSTing data', **extra_data) - return # drop the error + self._log_response(r, header_msg='Got "Payload Too Large" while POSTing data', **extra_data) + return True - if not self.UNKNOWN_SCHEMA_RE.match(r.text): - self._log_response(r, header_msg='Status from POST wasn\'t 200 (OK)') + self._log_response(r, header_msg="Status from POST wasn't 200 (OK)") + r.raise_for_status() - r.raise_for_status() + except requests.exceptions.HTTPError as e: + if unknown_schema := self.UNKNOWN_SCHEMA_RE.match(e.response.text): + logger.debug(f"EDDN doesn't (yet?) know about schema: {unknown_schema['schema_name']}" + f"/{unknown_schema['schema_version']}") + # This dropping is to cater for the time period when EDDN doesn't *yet* support a new schema. 
+ return True + + elif e.response.status_code == http.HTTPStatus.BAD_REQUEST: + # EDDN straight up says no, so drop the message + logger.debug(f"EDDN responded '400 Bad Request' to the message, dropping:\n{msg!r}") + return True + + else: + # This should catch anything else, e.g. timeouts, gateway errors + status['text'] = self.http_error_to_log(e) + + except requests.exceptions.RequestException as e: + logger.debug('Failed sending', exc_info=e) + # LANG: Error while trying to send data to EDDN + status['text'] = _("Error: Can't connect to EDDN") + + except Exception as e: + logger.debug('Failed sending', exc_info=e) + status['text'] = str(e) + + return False def _log_response( self, @@ -441,6 +434,92 @@ class EDDN: Content :\t{response.text} ''')+additional_data) + @staticmethod + def http_error_to_log(exception: requests.exceptions.HTTPError) -> str: + """Convert an exception from raise_for_status to a log message and displayed error.""" + status_code = exception.errno + + if status_code == 429: # HTTP UPGRADE REQUIRED + logger.warning('EDMC is sending schemas that are too old') + # LANG: EDDN has banned this version of our client + return _('EDDN Error: EDMC is too old for EDDN. Please update.') + + elif status_code == 400: + # we a validation check or something else. + logger.warning(f'EDDN Error: {status_code} -- {exception.response}') + # LANG: EDDN returned an error that indicates something about what we sent it was wrong + return _('EDDN Error: Validation Failed (EDMC Too Old?). See Log') + + else: + logger.warning(f'Unknown status code from EDDN: {status_code} -- {exception.response}') + # LANG: EDDN returned some sort of HTTP error, one we didn't expect. {STATUS} contains a number + return _('EDDN Error: Returned {STATUS} status code').format(STATUS=status_code) + + +# TODO: a good few of these methods are static or could be classmethods. they should be created as such. +class EDDN: + """EDDN Data export.""" + + DEFAULT_URL = 'https://eddn.edcd.io:4430/upload/' + if 'eddn' in debug_senders: + DEFAULT_URL = f'http://{edmc_data.DEBUG_WEBSERVER_HOST}:{edmc_data.DEBUG_WEBSERVER_PORT}/eddn' + + REPLAYPERIOD = 400 # Roughly two messages per second, accounting for send delays [ms] + REPLAYFLUSH = 20 # Update log on disk roughly every 10 seconds + MODULE_RE = re.compile(r'^Hpt_|^Int_|Armour_', re.IGNORECASE) + CANONICALISE_RE = re.compile(r'\$(.+)_name;') + CAPI_LOCALISATION_RE = re.compile(r'^loc[A-Z].+') + + def __init__(self, parent: tk.Tk): + self.parent: tk.Tk = parent + + if config.eddn_url is not None: + self.eddn_url = config.eddn_url + + else: + self.eddn_url = self.DEFAULT_URL + + self.sender = EDDNSender(self, self.eddn_url) + + self.fss_signals: List[Mapping[str, Any]] = [] + + def close(self): + """Close down the EDDN class instance.""" + logger.debug('Closing Sender...') + if self.sender: + self.sender.close() + + logger.debug('Done.') + + logger.debug('Closing EDDN requests.Session.') + self.session.close() + + def send(self, cmdr: str, msg: Mapping[str, Any]) -> None: + """ + Enqueue a message for transmission. + + :param cmdr: the CMDR to use as the uploader ID. + :param msg: the payload to send. 
+ """ + to_send: OrderedDictT[str, OrderedDict[str, Any]] = OrderedDict([ + ('$schemaRef', msg['$schemaRef']), + ('header', OrderedDict([ + ('softwareName', f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]'), + ('softwareVersion', str(appversion_nobuild())), + ('uploaderID', cmdr), + # TODO: Add `gameversion` and `gamebuild` if that change is live + # on EDDN. + ])), + ('message', msg['message']), + ]) + + # Ensure it's en-queued + msg_id = self.sender.add_message(cmdr, to_send) + # Now try to transmit it immediately + if self.sender.send_message_by_id(msg_id): + # De-queue + self.sender.delete_message(msg_id) + def sendreplay(self) -> None: # noqa: CCR001 """Send cached Journal lines to EDDN.""" # TODO: Convert to using the sqlite3 db @@ -493,63 +572,13 @@ class EDDN: 'https://eddn.edcd.io/schemas/' ) - try: - self.send(cmdr, msg) - self.replaylog.pop(0) - if not len(self.replaylog) % self.REPLAYFLUSH: - self.flush() - - except requests.exceptions.HTTPError as e: - if unknown_schema := self.UNKNOWN_SCHEMA_RE.match(e.response.text): - logger.debug(f"EDDN doesn't (yet?) know about schema: {unknown_schema['schema_name']}" - f"/{unknown_schema['schema_version']}") - # NB: This dropping is to cater for the time when EDDN - # doesn't *yet* support a new schema. - self.replaylog.pop(0) # Drop the message - self.flush() # Truncates the file, then writes the extant data - - elif e.response.status_code == 400: - # EDDN straight up says no, so drop the message - logger.debug(f"EDDN responded '400' to the message, dropping:\n{msg!r}") - self.replaylog.pop(0) # Drop the message - self.flush() # Truncates the file, then writes the extant data - - else: - status['text'] = self.http_error_to_log(e) - - except requests.exceptions.RequestException as e: - logger.debug('Failed sending', exc_info=e) - # LANG: Error while trying to send data to EDDN - status['text'] = _("Error: Can't connect to EDDN") - return # stop sending - - except Exception as e: - logger.debug('Failed sending', exc_info=e) - status['text'] = str(e) - return # stop sending + self.send(cmdr, msg) + self.replaylog.pop(0) + if not len(self.replaylog) % self.REPLAYFLUSH: + self.flush() self.parent.after(self.REPLAYPERIOD, self.sendreplay) - @staticmethod - def http_error_to_log(exception: requests.exceptions.HTTPError) -> str: - """Convert an exception from raise_for_status to a log message and displayed error.""" - status_code = exception.errno - - if status_code == 429: # HTTP UPGRADE REQUIRED - logger.warning('EDMC is sending schemas that are too old') - # LANG: EDDN has banned this version of our client - return _('EDDN Error: EDMC is too old for EDDN. Please update.') - - elif status_code == 400: - # we a validation check or something else. - logger.warning(f'EDDN Error: {status_code} -- {exception.response}') - # LANG: EDDN returned an error that indicates something about what we sent it was wrong - return _('EDDN Error: Validation Failed (EDMC Too Old?). See Log') - - else: - logger.warning(f'Unknown status code from EDDN: {status_code} -- {exception.response}') - # LANG: EDDN returned some sort of HTTP error, one we didn't expect. 
{STATUS} contains a number - return _('EDDN Error: Returned {STATUS} status code').format(STATUS=status_code) def export_commodities(self, data: Mapping[str, Any], is_beta: bool) -> None: # noqa: CCR001 """ @@ -883,33 +912,13 @@ class EDDN: def export_journal_entry(self, cmdr: str, entry: Mapping[str, Any], msg: Mapping[str, Any]) -> None: """ - Update EDDN with an event from the journal. - - Additionally if other lines have been saved for retry, it may send - those as well. + Send a Journal-sourced EDDN message. :param cmdr: Commander name as passed in through `journal_entry()`. :param entry: The full journal event dictionary (due to checks in this function). :param msg: The EDDN message body to be sent. """ - if self.replayfile or self.journal_replay_load_file(): - # Store the entry - self.replaylog.append(json.dumps([cmdr, msg])) - self.replayfile.write(f'{self.replaylog[-1]}\n') # type: ignore - - if ( - entry['event'] == 'Docked' or (entry['event'] == 'Location' and entry['Docked']) or not - (config.get_int('output') & config.OUT_SYS_DELAY) - ): - self.parent.after(self.REPLAYPERIOD, self.sendreplay) # Try to send this and previous entries - - else: - # Can't access replay file! Send immediately. - # LANG: Status text shown while attempting to send data - self.parent.children['status']['text'] = _('Sending data to EDDN...') - self.parent.update_idletasks() - self.send(cmdr, msg) - self.parent.children['status']['text'] = '' + self.send(cmdr, msg) def export_journal_generic(self, cmdr: str, is_beta: bool, entry: Mapping[str, Any]) -> None: """ From 598e54eaa4e869cd07608e7ebc5646e8355f98dd Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 30 Sep 2022 17:02:43 +0100 Subject: [PATCH 51/95] EDDNSender: Now properly sends messages to Gateway Including removing from the queue if it succeeded, or didn't and should be dropped. --- plugins/eddn.py | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 22152f83..6f867fcd 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -293,9 +293,11 @@ class EDDNSender: self.db_conn.commit() except Exception: - logger.exception('EDDNReplay INSERT error') + logger.exception('INSERT error') + # Can't possibly be a valid row id + return -1 - return self.db.lastrowid + return self.db.lastrowid or -1 def delete_message(self, row_id: int) -> None: """ @@ -327,17 +329,16 @@ class EDDNSender: row = dict(zip([c[0] for c in self.db.description], self.db.fetchone())) try: - self.send_message(row['message']) + if self.send_message(row['message']): + self.delete_message(id) + return True except requests.exceptions.HTTPError as e: logger.warning(f"HTTPError: {str(e)}") - finally: - # Remove from queue - ... + return False - - def send_message(self, msg: str) -> bool: + def send_message(self, msg: str) -> bool: # noqa: CCR001 """ Transmit a fully-formed EDDN message to the Gateway. @@ -351,13 +352,11 @@ class EDDNSender: should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', json.loads(msg)) if should_return: logger.warning('eddn.send has been disabled via killswitch. 
Returning.') - return - - msg = json.dumps(new_data) + return False status: tk.Widget = self.eddn.parent.children['status'] # Even the smallest possible message compresses somewhat, so always compress - encoded, compressed = text.gzip(json.dumps(msg, separators=(',', ':')), max_size=0) + encoded, compressed = text.gzip(json.dumps(new_data, separators=(',', ':')), max_size=0) headers: None | dict[str, str] = None if compressed: headers = {'Content-Encoding': 'gzip'} @@ -514,13 +513,12 @@ class EDDN: ]) # Ensure it's en-queued - msg_id = self.sender.add_message(cmdr, to_send) - # Now try to transmit it immediately - if self.sender.send_message_by_id(msg_id): - # De-queue - self.sender.delete_message(msg_id) + if (msg_id := self.sender.add_message(cmdr, to_send)) == -1: + return - def sendreplay(self) -> None: # noqa: CCR001 + self.sender.send_message_by_id(msg_id) + + def sendreplay(self) -> None: """Send cached Journal lines to EDDN.""" # TODO: Convert to using the sqlite3 db # **IF** this is moved to a thread worker then we need to ensure @@ -579,7 +577,6 @@ class EDDN: self.parent.after(self.REPLAYPERIOD, self.sendreplay) - def export_commodities(self, data: Mapping[str, Any], is_beta: bool) -> None: # noqa: CCR001 """ Update EDDN with the commodities on the current (lastStarport) station. From 3a57b53bbde003fb5a734daaa2d19b75ae5898e9 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 15:11:37 +0100 Subject: [PATCH 52/95] config/EDDN: Rename OUT_SYS_DELAY to OUT_EDDN_DO_NOT_DELAY The sense of this `output` flag has been inverted (always?) for a long time. 1. I have the option "Delay sending until docked" showing as *off* in the UI. 2. My config.output value is `100000000001`. 3. The value of this flag is `4096`, which means 12th bit (starting from 1, not zero). 4. So I have the bit set, but the option visibly off. So, rename this both to be more pertinent to its use *and* to be correct as to what `True` for it means. --- config/__init__.py | 2 +- plugins/eddn.py | 10 ++++++++-- prefs.py | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index 56679019..b7437b2d 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -176,7 +176,7 @@ class AbstractConfig(abc.ABC): # OUT_SYS_AUTO = 512 # Now always automatic OUT_MKT_MANUAL = 1024 OUT_SYS_EDDN = 2048 - OUT_SYS_DELAY = 4096 + OUT_EDDN_DO_NOT_DELAY = 4096 app_dir_path: pathlib.Path plugin_dir_path: pathlib.Path diff --git a/plugins/eddn.py b/plugins/eddn.py index 6f867fcd..f5469c54 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -349,6 +349,9 @@ class EDDNSender: :param msg: Fully formed, string, message. :return: `True` for "now remove this message from the queue" """ + + # TODO: Check if user options require us to send at this time. + should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', json.loads(msg)) if should_return: logger.warning('eddn.send has been disabled via killswitch. Returning.') @@ -500,6 +503,9 @@ class EDDN: :param cmdr: the CMDR to use as the uploader ID. :param msg: the payload to send. 
""" + + # TODO: Check if the global 'Send to EDDN' option is off + to_send: OrderedDictT[str, OrderedDict[str, Any]] = OrderedDict([ ('$schemaRef', msg['$schemaRef']), ('header', OrderedDict([ @@ -1854,7 +1860,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: ) this.eddn_system_button.grid(padx=BUTTONX, pady=(5, 0), sticky=tk.W) - this.eddn_delay = tk.IntVar(value=(output & config.OUT_SYS_DELAY) and 1) + this.eddn_delay = tk.IntVar(value=(output & config.OUT_EDDN_DO_NOT_DELAY) and 1) # Output setting under 'Send system and scan data to the Elite Dangerous Data Network' new in E:D 2.2 this.eddn_delay_button = nb.Checkbutton( eddnframe, @@ -1891,7 +1897,7 @@ def prefs_changed(cmdr: str, is_beta: bool) -> None: & (config.OUT_MKT_TD | config.OUT_MKT_CSV | config.OUT_SHIP | config.OUT_MKT_MANUAL)) + (this.eddn_station.get() and config.OUT_MKT_EDDN) + (this.eddn_system.get() and config.OUT_SYS_EDDN) + - (this.eddn_delay.get() and config.OUT_SYS_DELAY) + (this.eddn_delay.get() and config.OUT_EDDN_DO_NOT_DELAY) ) diff --git a/prefs.py b/prefs.py index 03b8dcd8..acc7f12c 100644 --- a/prefs.py +++ b/prefs.py @@ -1221,7 +1221,7 @@ class PreferencesDialog(tk.Toplevel): (self.out_csv.get() and config.OUT_MKT_CSV) + (config.OUT_MKT_MANUAL if not self.out_auto.get() else 0) + (self.out_ship.get() and config.OUT_SHIP) + - (config.get_int('output') & (config.OUT_MKT_EDDN | config.OUT_SYS_EDDN | config.OUT_SYS_DELAY)) + (config.get_int('output') & (config.OUT_MKT_EDDN | config.OUT_SYS_EDDN | config.OUT_EDDN_DO_NOT_DELAY)) ) config.set( From 9f02f18408ba72d88826838da73b6fd7ce112e5b Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 15:26:43 +0100 Subject: [PATCH 53/95] config/EDDN: Rename OUT_SYS_EDDN to OUT_EDDN_SEND_NON_STATION * This was perhaps originally meant for what the UI option says, i.e. "send system and scan data", but is actually being used for anything that is **NOT** 'station data' (even though *that* option has 'MKT' it includes outfitting and shipyard as well). So, just name this more sanely such that code using it is more obvious as to the actual intent. 
--- config/__init__.py | 2 +- plugins/eddn.py | 10 +++++----- prefs.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index b7437b2d..d57412c4 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -175,7 +175,7 @@ class AbstractConfig(abc.ABC): # OUT_SYS_EDSM = 256 # Now a plugin # OUT_SYS_AUTO = 512 # Now always automatic OUT_MKT_MANUAL = 1024 - OUT_SYS_EDDN = 2048 + OUT_EDDN_SEND_NON_STATION = 2048 OUT_EDDN_DO_NOT_DELAY = 4096 app_dir_path: pathlib.Path diff --git a/plugins/eddn.py b/plugins/eddn.py index f5469c54..ab7e7ef9 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -1824,7 +1824,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: BUTTONX = 12 # noqa: N806 # indent Checkbuttons and Radiobuttons if prefsVersion.shouldSetDefaults('0.0.0.0', not bool(config.get_int('output'))): - output: int = (config.OUT_MKT_EDDN | config.OUT_SYS_EDDN) # default settings + output: int = (config.OUT_MKT_EDDN | config.OUT_EDDN_SEND_NON_STATION) # default settings else: output = config.get_int('output') @@ -1849,7 +1849,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: ) # Output setting this.eddn_station_button.grid(padx=BUTTONX, pady=(5, 0), sticky=tk.W) - this.eddn_system = tk.IntVar(value=(output & config.OUT_SYS_EDDN) and 1) + this.eddn_system = tk.IntVar(value=(output & config.OUT_EDDN_SEND_NON_STATION) and 1) # Output setting new in E:D 2.2 this.eddn_system_button = nb.Checkbutton( eddnframe, @@ -1896,7 +1896,7 @@ def prefs_changed(cmdr: str, is_beta: bool) -> None: (config.get_int('output') & (config.OUT_MKT_TD | config.OUT_MKT_CSV | config.OUT_SHIP | config.OUT_MKT_MANUAL)) + (this.eddn_station.get() and config.OUT_MKT_EDDN) + - (this.eddn_system.get() and config.OUT_SYS_EDDN) + + (this.eddn_system.get() and config.OUT_EDDN_SEND_NON_STATION) + (this.eddn_delay.get() and config.OUT_EDDN_DO_NOT_DELAY) ) @@ -2066,7 +2066,7 @@ def journal_entry( # noqa: C901, CCR001 this.status_body_name = None # Events with their own EDDN schema - if config.get_int('output') & config.OUT_SYS_EDDN and not state['Captain']: + if config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION and not state['Captain']: if event_name == 'fssdiscoveryscan': return this.eddn.export_journal_fssdiscoveryscan(cmdr, system, state['StarPos'], is_beta, entry) @@ -2123,7 +2123,7 @@ def journal_entry( # noqa: C901, CCR001 ) # Send journal schema events to EDDN, but not when on a crew - if (config.get_int('output') & config.OUT_SYS_EDDN and not state['Captain'] and + if (config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION and not state['Captain'] and (event_name in ('location', 'fsdjump', 'docked', 'scan', 'saasignalsfound', 'carrierjump')) and ('StarPos' in entry or this.coordinates)): diff --git a/prefs.py b/prefs.py index acc7f12c..d2fb69b0 100644 --- a/prefs.py +++ b/prefs.py @@ -1221,7 +1221,7 @@ class PreferencesDialog(tk.Toplevel): (self.out_csv.get() and config.OUT_MKT_CSV) + (config.OUT_MKT_MANUAL if not self.out_auto.get() else 0) + (self.out_ship.get() and config.OUT_SHIP) + - (config.get_int('output') & (config.OUT_MKT_EDDN | config.OUT_SYS_EDDN | config.OUT_EDDN_DO_NOT_DELAY)) + (config.get_int('output') & (config.OUT_MKT_EDDN | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY)) ) config.set( From 0d35f8874a3b7b746a2f1e502b2834981e7ef0ee Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 15:29:11 +0100 Subject: [PATCH 54/95] config/EDDN: Rename OUT_MKT_EDDN to 
OUT_EDDN_SEND_STATION_DATA This flag controls whether commodity, outfitting or shipyard schema messages are sent. Thus 'MKT' ('market') is misleading. Rename it so the intent when used is clear. --- EDMarketConnector.py | 2 +- config/__init__.py | 4 ++-- plugins/eddn.py | 13 +++++++------ prefs.py | 2 +- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/EDMarketConnector.py b/EDMarketConnector.py index 283c3bf5..041f7331 100755 --- a/EDMarketConnector.py +++ b/EDMarketConnector.py @@ -922,7 +922,7 @@ class AppWindow(object): return False # Ignore possibly missing shipyard info - elif (config.get_int('output') & config.OUT_MKT_EDDN) \ + elif (config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA) \ and not (data['lastStarport'].get('commodities') or data['lastStarport'].get('modules')): if not self.status['text']: # LANG: Status - Either no market or no modules data for station from Frontier CAPI diff --git a/config/__init__.py b/config/__init__.py index d57412c4..98056504 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -162,7 +162,7 @@ def appversion_nobuild() -> semantic_version.Version: class AbstractConfig(abc.ABC): """Abstract root class of all platform specific Config implementations.""" - OUT_MKT_EDDN = 1 + OUT_EDDN_SEND_STATION_DATA = 1 # OUT_MKT_BPC = 2 # No longer supported OUT_MKT_TD = 4 OUT_MKT_CSV = 8 @@ -171,7 +171,7 @@ class AbstractConfig(abc.ABC): # OUT_SYS_FILE = 32 # No longer supported # OUT_STAT = 64 # No longer available # OUT_SHIP_CORIOLIS = 128 # Replaced by OUT_SHIP - OUT_STATION_ANY = OUT_MKT_EDDN | OUT_MKT_TD | OUT_MKT_CSV + OUT_STATION_ANY = OUT_EDDN_SEND_STATION_DATA | OUT_MKT_TD | OUT_MKT_CSV # OUT_SYS_EDSM = 256 # Now a plugin # OUT_SYS_AUTO = 512 # Now always automatic OUT_MKT_MANUAL = 1024 diff --git a/plugins/eddn.py b/plugins/eddn.py index ab7e7ef9..b0f28a27 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -1824,7 +1824,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: BUTTONX = 12 # noqa: N806 # indent Checkbuttons and Radiobuttons if prefsVersion.shouldSetDefaults('0.0.0.0', not bool(config.get_int('output'))): - output: int = (config.OUT_MKT_EDDN | config.OUT_EDDN_SEND_NON_STATION) # default settings + output: int = (config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION) # default settings else: output = config.get_int('output') @@ -1839,7 +1839,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: underline=True ).grid(padx=PADX, sticky=tk.W) # Don't translate - this.eddn_station = tk.IntVar(value=(output & config.OUT_MKT_EDDN) and 1) + this.eddn_station = tk.IntVar(value=(output & config.OUT_EDDN_SEND_STATION_DATA) and 1) this.eddn_station_button = nb.Checkbutton( eddnframe, # LANG: Enable EDDN support for station data checkbox label @@ -1895,7 +1895,7 @@ def prefs_changed(cmdr: str, is_beta: bool) -> None: 'output', (config.get_int('output') & (config.OUT_MKT_TD | config.OUT_MKT_CSV | config.OUT_SHIP | config.OUT_MKT_MANUAL)) + - (this.eddn_station.get() and config.OUT_MKT_EDDN) + + (this.eddn_station.get() and config.OUT_EDDN_SEND_STATION_DATA) + (this.eddn_system.get() and config.OUT_EDDN_SEND_NON_STATION) + (this.eddn_delay.get() and config.OUT_EDDN_DO_NOT_DELAY) ) @@ -2208,11 +2208,12 @@ def journal_entry( # noqa: C901, CCR001 return _("Error: Can't connect to EDDN") # LANG: Error while trying to send data to EDDN except Exception as e: + return logger.debug('Failed in export_journal_entry', exc_info=e) return str(e) - elif (config.get_int('output') & 
config.OUT_MKT_EDDN and not state['Captain'] and - event_name in ('market', 'outfitting', 'shipyard')): + elif (config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA and not state['Captain'] and + event_name in ('market', 'outfitting', 'shipyard')): # Market.json, Outfitting.json or Shipyard.json to process try: @@ -2263,7 +2264,7 @@ def cmdr_data(data: CAPIData, is_beta: bool) -> Optional[str]: # noqa: CCR001 :return: str - Error message, or `None` if no errors. """ if (data['commander'].get('docked') or (this.on_foot and monitor.station) - and config.get_int('output') & config.OUT_MKT_EDDN): + and config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA): try: if this.marketId != data['lastStarport']['id']: this.commodities = this.outfitting = this.shipyard = None diff --git a/prefs.py b/prefs.py index d2fb69b0..dd52c78b 100644 --- a/prefs.py +++ b/prefs.py @@ -1221,7 +1221,7 @@ class PreferencesDialog(tk.Toplevel): (self.out_csv.get() and config.OUT_MKT_CSV) + (config.OUT_MKT_MANUAL if not self.out_auto.get() else 0) + (self.out_ship.get() and config.OUT_SHIP) + - (config.get_int('output') & (config.OUT_MKT_EDDN | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY)) + (config.get_int('output') & (config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY)) ) config.set( From 7d0ae88757fa0fe1ffb86502fd3492cb49e63bf3 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 15:46:12 +0100 Subject: [PATCH 55/95] EDDN: EDDNSender.send_message() doesn't care about do/don't send options By this point other code will have made that decision. --- plugins/eddn.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index b0f28a27..14c42dbb 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -342,6 +342,10 @@ class EDDNSender: """ Transmit a fully-formed EDDN message to the Gateway. + If this is called then the attempt *will* be made. This is not where + options to not send to EDDN, or to delay the sending until docked, + are checked. + Should catch and handle all failure conditions. A `True` return might mean that the message was successfully sent, *or* that this message should not be retried after a failure, i.e. too large. @@ -349,9 +353,6 @@ class EDDNSender: :param msg: Fully formed, string, message. :return: `True` for "now remove this message from the queue" """ - - # TODO: Check if user options require us to send at this time. - should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', json.loads(msg)) if should_return: logger.warning('eddn.send has been disabled via killswitch. Returning.') From 5eb4296ec6a7c1cab2210f430234145cf2ffd7e1 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 16:12:28 +0100 Subject: [PATCH 56/95] EDDN: First steps to ensure new code respects user configuration It's easier to check "should we send this message at all?" earlier. Currently all of the following ('station data') do so: * CAPI commodity, outfitting (also fcmaterials) and shipyard. * Journal commodity, fcmaterials, outfitting, and shipyard. 
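A condensed sketch of the kind of gate being introduced: classify a message by its `$schemaRef`, then check the matching output bit before sending. The schema list and flag value are taken from the diffs in this series; the standalone function is an illustration, not the code itself.

# From the patch: schema types that pertain to station data.
STATION_SCHEMAS = ('commodity', 'fcmaterials_capi', 'fcmaterials_journal', 'outfitting', 'shipyard')
OUT_EDDN_SEND_STATION_DATA = 1  # config flag value from these patches


def station_data_allowed(schema_ref: str, output: int) -> bool:
    """Illustrative gate: drop station data unless the user enabled sending it."""
    is_station = any(f'/{s}/' in schema_ref for s in STATION_SCHEMAS)
    return not is_station or bool(output & OUT_EDDN_SEND_STATION_DATA)


print(station_data_allowed('https://eddn.edcd.io/schemas/commodity/3', 0))                            # False
print(station_data_allowed('https://eddn.edcd.io/schemas/commodity/3', OUT_EDDN_SEND_STATION_DATA))   # True
print(station_data_allowed('https://eddn.edcd.io/schemas/journal/1', 0))                              # True - not station data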
--- plugins/eddn.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 14c42dbb..1f9ee487 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -130,6 +130,8 @@ class EDDNSender: """Handle sending of EDDN messages to the Gateway.""" SQLITE_DB_FILENAME_V1 = 'eddn_queue-v1.db' + # EDDN schema types that pertain to station data + STATION_SCHEMAS = ('commodity', 'fcmaterials_capi', 'fcmaterials_journal', 'outfitting', 'shipyard') TIMEOUT = 10 # requests timeout UNKNOWN_SCHEMA_RE = re.compile( r"^FAIL: \[JsonValidationException\('Schema " @@ -504,8 +506,17 @@ class EDDN: :param cmdr: the CMDR to use as the uploader ID. :param msg: the payload to send. """ - - # TODO: Check if the global 'Send to EDDN' option is off + # TODO: Check if we should actually send this message: + # 1. Is sending of this 'class' of message configured on ? + # 2. Are we *not* docked and delayed sending is configured on ? + # NB: This is a placeholder whilst all the "start of processing data" + # code points are confirmed to have their own check. + if ( + any(f'/{s}/' in msg['$schemaRef'] for s in EDDNSender.STATION_SCHEMAS) + and not config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA + ): + # Sending of station data configured off + return to_send: OrderedDictT[str, OrderedDict[str, Any]] = OrderedDict([ ('$schemaRef', msg['$schemaRef']), @@ -523,7 +534,7 @@ class EDDN: if (msg_id := self.sender.add_message(cmdr, to_send)) == -1: return - self.sender.send_message_by_id(msg_id) + self.sender.send_message_by_id(msg_id) def sendreplay(self) -> None: """Send cached Journal lines to EDDN.""" @@ -1340,6 +1351,10 @@ class EDDN: # ] # } + # TODO: Check we're configured to send station data + if not config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: + return None + # Sanity check if 'Items' not in entry: logger.warning(f"FCMaterials didn't contain an Items array!\n{entry!r}") @@ -2209,7 +2224,6 @@ def journal_entry( # noqa: C901, CCR001 return _("Error: Can't connect to EDDN") # LANG: Error while trying to send data to EDDN except Exception as e: - return logger.debug('Failed in export_journal_entry', exc_info=e) return str(e) From 03b36cbe39a86d7108d34ec1c252d21734a46e0a Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 16:15:08 +0100 Subject: [PATCH 57/95] EDDN: Improve export_journal_fcmaterials "don't send" comment --- plugins/eddn.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 1f9ee487..d9cb6809 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -1350,8 +1350,7 @@ class EDDN: # } # ] # } - - # TODO: Check we're configured to send station data + # Abort if we're not configured to send 'station' data. if not config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: return None From db7bb735d2423162b007730096fd9a8f4666e821 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 16:42:27 +0100 Subject: [PATCH 58/95] EDDN: Put "should we send (now?)" checks into `EDDN.export_journal_entry()` In some cases the check might already have been done, but if not then this is the last easy place to perform it. NB: Unlike the old code this does *not* attempt to check "are we docked *now* ?" for triggering sending of previously queue messages. That's going to need a thread worker. 
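The thread worker alluded to above is not part of these patches. Purely as a hypothetical sketch of the idea, retrying queued messages away from the main Tk loop once docked, it could look something like the following; all names are invented, and as the earlier TODO notes, a real version would also need thread-safe sqlite3 handling (for example, one connection per thread).

import threading
import time


def retry_worker(is_docked, queued_ids, send_message_by_id, interval: float = 5.0) -> None:
    """Hypothetical loop: periodically retry queued messages while docked."""
    while True:
        time.sleep(interval)
        if not is_docked():
            continue

        for msg_id in queued_ids():
            send_message_by_id(msg_id)


# Invented stand-ins so the sketch runs on its own; a daemon thread never
# blocks application shutdown.
worker = threading.Thread(
    target=retry_worker,
    args=(lambda: True, lambda: [], lambda msg_id: True),
    daemon=True,
)
worker.start()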
--- plugins/eddn.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index d9cb6809..8558815f 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -246,7 +246,7 @@ class EDDNSender: if self.db_conn: self.db_conn.close() - def add_message(self, cmdr: str, msg: dict) -> int: + def add_message(self, cmdr: str, msg: MutableMapping[str, Any]) -> int: """ Add an EDDN message to the database. @@ -925,7 +925,7 @@ class EDDN: # this.shipyard = (horizons, shipyard) - def export_journal_entry(self, cmdr: str, entry: Mapping[str, Any], msg: Mapping[str, Any]) -> None: + def export_journal_entry(self, cmdr: str, entry: Mapping[str, Any], msg: MutableMapping[str, Any]) -> None: """ Send a Journal-sourced EDDN message. @@ -933,7 +933,24 @@ class EDDN: :param entry: The full journal event dictionary (due to checks in this function). :param msg: The EDDN message body to be sent. """ - self.send(cmdr, msg) + # Check if the user configured messages to be sent. + # + # 1. If this is a 'station' data message then check config.EDDN_SEND_STATION_DATA + # 2. Else check against config.EDDN_SEND_NON_STATION *and* config.OUT_EDDN_DO_NOT_DELAY + if any(f'{s}' in msg['$schemaRef'] for s in EDDNSender.STATION_SCHEMAS): + # 'Station data' + if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: + # And user has 'station data' configured to be sent + msg_id = self.sender.add_message(cmdr, msg) + # 'Station data' is never delayed on construction of message + self.sender.send_message_by_id(msg_id) + + elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: + # Any data that isn't 'station' is configured to be sent + msg_id = self.sender.add_message(cmdr, msg) + if not config.get_int('output') & config.OUT_SYS_DELAY: + # No delay in sending configured, so attempt immediately + self.sender.send_message_by_id(msg_id) def export_journal_generic(self, cmdr: str, is_beta: bool, entry: Mapping[str, Any]) -> None: """ From fe24cf7e9596277d10bb68d930251ffa479785c9 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 16:52:30 +0100 Subject: [PATCH 59/95] EDDN: Remove legacy `sendreplay()` * `EDDN.sendreplay()` is no longer used. * In `prefsvarschanged()` there was a reference to `eddn.replayfile`, so as to grey out the "Delay sending..." option if the file wasn't available. So that's moot and also removed, but also... * Comment the purpose of that line in `prefsvarchanged()` because it's not immediately obvious. 
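For reference, the `x and A or B` construct on that commented line is the old
pre-`A if x else B` idiom; a tiny standalone illustration (using the underlying Tk
string values rather than importing tkinter):

    # The two forms are equivalent as long as A is truthy; tk.NORMAL == 'normal'
    # is truthy, so the legacy form is safe here.
    tk_NORMAL, tk_DISABLED = 'normal', 'disabled'
    for enabled in (True, False):
        legacy = enabled and tk_NORMAL or tk_DISABLED
        modern = tk_NORMAL if enabled else tk_DISABLED
        assert legacy == modern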
--- plugins/eddn.py | 64 ++++--------------------------------------------- 1 file changed, 4 insertions(+), 60 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 8558815f..7250a8a7 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -536,65 +536,6 @@ class EDDN: self.sender.send_message_by_id(msg_id) - def sendreplay(self) -> None: - """Send cached Journal lines to EDDN.""" - # TODO: Convert to using the sqlite3 db - # **IF** this is moved to a thread worker then we need to ensure - # that we're operating sqlite3 in a thread-safe manner, - # Ref: - if not self.replayfile: - return # Probably closing app - - status: tk.Widget = self.parent.children['status'] - - if not self.replaylog: - status['text'] = '' - return - - localized: str = _('Sending data to EDDN...') # LANG: Status text shown while attempting to send data - if len(self.replaylog) == 1: - status['text'] = localized - - else: - status['text'] = f'{localized.replace("...", "")} [{len(self.replaylog)}]' - - self.parent.update_idletasks() - - # Paranoia check in case this function gets chain-called. - if not self.replaylog: - # import traceback - # logger.error( - # f'self.replaylog (type: {type(self.replaylog)}) is falsey after update_idletasks(). Traceback:\n' - # f'{"".join(traceback.format_list(traceback.extract_stack()))}') - return - - try: - cmdr, msg = json.loads(self.replaylog[0], object_pairs_hook=OrderedDict) - - except json.JSONDecodeError as e: - # Couldn't decode - shouldn't happen! - logger.debug(f'\n{self.replaylog[0]}\n', exc_info=e) - # Discard and continue - self.replaylog.pop(0) - - else: - # TODO: Check message against *current* relevant schema so we don't try - # to send an old message that's now invalid. - - # Rewrite old schema name - if msg['$schemaRef'].startswith('http://schemas.elite-markets.net/eddn/'): - msg['$schemaRef'] = str(msg['$schemaRef']).replace( - 'http://schemas.elite-markets.net/eddn/', - 'https://eddn.edcd.io/schemas/' - ) - - self.send(cmdr, msg) - self.replaylog.pop(0) - if not len(self.replaylog) % self.REPLAYFLUSH: - self.flush() - - self.parent.after(self.REPLAYPERIOD, self.sendreplay) - def export_commodities(self, data: Mapping[str, Any], is_beta: bool) -> None: # noqa: CCR001 """ Update EDDN with the commodities on the current (lastStarport) station. @@ -1911,9 +1852,12 @@ def prefsvarchanged(event=None) -> None: :param event: tkinter event ? """ + # These two lines are legacy and probably not even needed this.eddn_station_button['state'] = tk.NORMAL this.eddn_system_button['state'] = tk.NORMAL - this.eddn_delay_button['state'] = this.eddn.replayfile and this.eddn_system.get() and tk.NORMAL or tk.DISABLED + # This line will grey out the 'Delay sending ...' option if the 'Send + # system and scan data' option is off. + this.eddn_delay_button['state'] = this.eddn_system.get() and tk.NORMAL or tk.DISABLED def prefs_changed(cmdr: str, is_beta: bool) -> None: From 06edcf3ea96cf18b7314dba8b450da3294b20f13 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 17:21:27 +0100 Subject: [PATCH 60/95] EDDN: Remove `EDDN.send()` in favour of renamed `.export_journal_entry()` * Now that we're not trying to do "did we just/are we know docked?" in this code it turns out that both CAPI and Journal messages can use the same function for this. * And as it's no longer journal-specific `EDDN.export_journal_entry()` has been renamed to `EDDN.send_message()`. 
This whole branch now needs to actually implement sending queued messages when docked, and periodically in the case of initial failures. --- plugins/eddn.py | 83 +++++++++++++------------------------------------ 1 file changed, 22 insertions(+), 61 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 7250a8a7..f3c3e9d6 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -499,43 +499,6 @@ class EDDN: logger.debug('Closing EDDN requests.Session.') self.session.close() - def send(self, cmdr: str, msg: Mapping[str, Any]) -> None: - """ - Enqueue a message for transmission. - - :param cmdr: the CMDR to use as the uploader ID. - :param msg: the payload to send. - """ - # TODO: Check if we should actually send this message: - # 1. Is sending of this 'class' of message configured on ? - # 2. Are we *not* docked and delayed sending is configured on ? - # NB: This is a placeholder whilst all the "start of processing data" - # code points are confirmed to have their own check. - if ( - any(f'/{s}/' in msg['$schemaRef'] for s in EDDNSender.STATION_SCHEMAS) - and not config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA - ): - # Sending of station data configured off - return - - to_send: OrderedDictT[str, OrderedDict[str, Any]] = OrderedDict([ - ('$schemaRef', msg['$schemaRef']), - ('header', OrderedDict([ - ('softwareName', f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]'), - ('softwareVersion', str(appversion_nobuild())), - ('uploaderID', cmdr), - # TODO: Add `gameversion` and `gamebuild` if that change is live - # on EDDN. - ])), - ('message', msg['message']), - ]) - - # Ensure it's en-queued - if (msg_id := self.sender.add_message(cmdr, to_send)) == -1: - return - - self.sender.send_message_by_id(msg_id) - def export_commodities(self, data: Mapping[str, Any], is_beta: bool) -> None: # noqa: CCR001 """ Update EDDN with the commodities on the current (lastStarport) station. @@ -601,7 +564,7 @@ class EDDN: if 'prohibited' in data['lastStarport']: message['prohibited'] = sorted(x for x in (data['lastStarport']['prohibited'] or {}).values()) - self.send(data['commander']['name'], { + self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/commodity/3{"/test" if is_beta else ""}', 'message': message, }) @@ -686,7 +649,7 @@ class EDDN: # Don't send empty modules list - schema won't allow it if outfitting and this.outfitting != (horizons, outfitting): - self.send(data['commander']['name'], { + self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/outfitting/2{"/test" if is_beta else ""}', 'message': OrderedDict([ ('timestamp', data['timestamp']), @@ -730,7 +693,7 @@ class EDDN: ) # Don't send empty ships list - shipyard data is only guaranteed present if user has visited the shipyard. if shipyard and this.shipyard != (horizons, shipyard): - self.send(data['commander']['name'], { + self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/shipyard/2{"/test" if is_beta else ""}', 'message': OrderedDict([ ('timestamp', data['timestamp']), @@ -778,7 +741,7 @@ class EDDN: # none and that really does need to be recorded over EDDN so that, e.g. # EDDB can update in a timely manner. 
if this.commodities != commodities: - self.send(cmdr, { + self.send_message(cmdr, { '$schemaRef': f'https://eddn.edcd.io/schemas/commodity/3{"/test" if is_beta else ""}', 'message': OrderedDict([ ('timestamp', entry['timestamp']), @@ -817,7 +780,7 @@ class EDDN: ) # Don't send empty modules list - schema won't allow it if outfitting and this.outfitting != (horizons, outfitting): - self.send(cmdr, { + self.send_message(cmdr, { '$schemaRef': f'https://eddn.edcd.io/schemas/outfitting/2{"/test" if is_beta else ""}', 'message': OrderedDict([ ('timestamp', entry['timestamp']), @@ -851,7 +814,7 @@ class EDDN: shipyard = sorted(ship['ShipType'] for ship in ships) # Don't send empty ships list - shipyard data is only guaranteed present if user has visited the shipyard. if shipyard and this.shipyard != (horizons, shipyard): - self.send(cmdr, { + self.send_message(cmdr, { '$schemaRef': f'https://eddn.edcd.io/schemas/shipyard/2{"/test" if is_beta else ""}', 'message': OrderedDict([ ('timestamp', entry['timestamp']), @@ -866,12 +829,11 @@ class EDDN: # this.shipyard = (horizons, shipyard) - def export_journal_entry(self, cmdr: str, entry: Mapping[str, Any], msg: MutableMapping[str, Any]) -> None: + def send_message(self, cmdr: str, msg: MutableMapping[str, Any]) -> None: """ - Send a Journal-sourced EDDN message. + Send an EDDN message. :param cmdr: Commander name as passed in through `journal_entry()`. - :param entry: The full journal event dictionary (due to checks in this function). :param msg: The EDDN message body to be sent. """ # Check if the user configured messages to be sent. @@ -905,7 +867,7 @@ class EDDN: '$schemaRef': f'https://eddn.edcd.io/schemas/journal/1{"/test" if is_beta else ""}', 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) def entry_augment_system_data( self, @@ -993,7 +955,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_navbeaconscan( @@ -1035,7 +997,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_codexentry( # noqa: CCR001 @@ -1135,7 +1097,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_scanbarycentre( @@ -1189,7 +1151,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_navroute( @@ -1262,7 +1224,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_fcmaterials( @@ -1346,7 +1308,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_capi_fcmaterials( @@ -1404,7 +1366,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(data['commander']['name'], entry, msg) + this.eddn.send_message(data['commander']['name'], msg) return None def export_journal_approachsettlement( @@ -1479,7 +1441,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def export_journal_fssallbodiesfound( @@ -1529,7 +1491,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def 
export_journal_fssbodysignals( @@ -1585,7 +1547,7 @@ class EDDN: 'message': entry } - this.eddn.export_journal_entry(cmdr, entry, msg) + this.eddn.send_message(cmdr, msg) return None def enqueue_journal_fsssignaldiscovered(self, entry: MutableMapping[str, Any]) -> None: @@ -1692,8 +1654,7 @@ class EDDN: logger.trace_if("plugin.eddn.fsssignaldiscovered", f"FSSSignalDiscovered batch is {json.dumps(msg)}") - # Fake an 'entry' as it's only there for some "should we send replay?" checks in the called function. - this.eddn.export_journal_entry(cmdr, {'event': 'send_fsssignaldiscovered'}, msg) + this.eddn.send_message(cmdr, msg) self.fss_signals = [] return None @@ -2180,11 +2141,11 @@ def journal_entry( # noqa: C901, CCR001 this.eddn.export_journal_generic(cmdr, is_beta, filter_localised(entry)) except requests.exceptions.RequestException as e: - logger.debug('Failed in export_journal_entry', exc_info=e) + logger.debug('Failed in send_message', exc_info=e) return _("Error: Can't connect to EDDN") # LANG: Error while trying to send data to EDDN except Exception as e: - logger.debug('Failed in export_journal_entry', exc_info=e) + logger.debug('Failed in export_journal_generic', exc_info=e) return str(e) elif (config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA and not state['Captain'] and From 871f50288e54a1a78f620734962b539164ef4c8c Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 5 Oct 2022 18:01:39 +0100 Subject: [PATCH 61/95] prefs.py: Fix overly long line (config constants renames) --- prefs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prefs.py b/prefs.py index dd52c78b..7be52526 100644 --- a/prefs.py +++ b/prefs.py @@ -1221,7 +1221,9 @@ class PreferencesDialog(tk.Toplevel): (self.out_csv.get() and config.OUT_MKT_CSV) + (config.OUT_MKT_MANUAL if not self.out_auto.get() else 0) + (self.out_ship.get() and config.OUT_SHIP) + - (config.get_int('output') & (config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY)) + (config.get_int('output') & ( + config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY + )) ) config.set( From 876c34ecfdf5ec07495de83e3809b2e31f8b02b7 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 14:09:08 +0000 Subject: [PATCH 62/95] eddn: Remove two 'noqa: CCR001' that are no longer applicable --- plugins/eddn.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index f3c3e9d6..ed3f4cef 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -340,7 +340,7 @@ class EDDNSender: return False - def send_message(self, msg: str) -> bool: # noqa: CCR001 + def send_message(self, msg: str) -> bool: """ Transmit a fully-formed EDDN message to the Gateway. @@ -2191,7 +2191,7 @@ def journal_entry( # noqa: C901, CCR001 return None -def cmdr_data(data: CAPIData, is_beta: bool) -> Optional[str]: # noqa: CCR001 +def cmdr_data(data: CAPIData, is_beta: bool) -> Optional[str]: """ Process new CAPI data. From a2d2723f491d94f7b7a4fc419a40759e3e138a6d Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 15:29:05 +0000 Subject: [PATCH 63/95] monitor: Add an 'IsDocked' flag to monitor.state. * This is cleaner than starting to track it in `plugins/eddn.py` specifically. * This is literally only about if we're piloting a ship that is docked, so not even trying to resolve "on-foot, in a station, 'Location' said not docked though". 
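From a plugin's point of view this is just another `state` key; a hypothetical
consumer in `journal_entry()` (illustration only, semantics per the PLUGINS.md entry
added later in this series):

    # Hypothetical plugin code: only do docked-only work once docked in our own ship.
    def journal_entry(cmdr, is_beta, system, station, entry, state):
        if state.get('IsDocked'):
            print(f'{cmdr} is docked at {station}; safe to flush work held back until docked')
        return None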
--- monitor.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/monitor.py b/monitor.py index 859b2a50..040afa82 100644 --- a/monitor.py +++ b/monitor.py @@ -166,6 +166,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below 'Modules': None, 'CargoJSON': None, # The raw data from the last time cargo.json was read 'Route': None, # Last plotted route from Route.json file + 'IsDocked': False, # Whether we think cmdr is docked 'OnFoot': False, # Whether we think you're on-foot 'Component': defaultdict(int), # Odyssey Components in Ship Locker 'Item': defaultdict(int), # Odyssey Items in Ship Locker @@ -306,6 +307,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.systemaddress = None self.is_beta = False self.state['OnFoot'] = False + self.state['IsDocked'] = False self.state['Body'] = None self.state['BodyType'] = None @@ -725,6 +727,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.station_marketid = None self.stationtype = None self.stationservices = None + self.state['IsDocked'] = False elif event_type == 'embark': # This event is logged when a player (on foot) gets into a ship or SRV @@ -791,6 +794,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.state['Dropship'] = False elif event_type == 'docked': + self.state['IsDocked'] = True self.station = entry.get('StationName') # May be None self.station_marketid = entry.get('MarketID') # May be None self.stationtype = entry.get('StationType') # May be None @@ -813,6 +817,8 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below if event_type == 'location': logger.trace_if('journal.locations', '"Location" event') + if entry.get('Docked'): + self.state['IsDocked'] = True elif event_type == 'fsdjump': self.planet = None From be1ef32238a22d6186669e99d096004d0f337986 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 15:30:30 +0000 Subject: [PATCH 64/95] config: Minor ordering change to ensure `OUT_STATION_ANY` set correctly * This depends on `OUT_EDDN_SEND_STATION_DATA` which is defined below where this originally was. 
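The underlying reason: a class body executes top-to-bottom, so a composite constant
can only reference flags already defined above it.  A standalone illustration (not
EDMC code):

    # Referencing a later class attribute fails at class-definition time, which is
    # why OUT_STATION_ANY has to follow the flags it combines.
    try:
        class Flags:
            A = 1
            COMBINED = A | B   # 'B' is not defined yet...
            B = 2
    except NameError as e:
        print(e)               # ...so this raises: name 'B' is not defined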
--- config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/__init__.py b/config/__init__.py index 98056504..1715cd46 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -171,12 +171,12 @@ class AbstractConfig(abc.ABC): # OUT_SYS_FILE = 32 # No longer supported # OUT_STAT = 64 # No longer available # OUT_SHIP_CORIOLIS = 128 # Replaced by OUT_SHIP - OUT_STATION_ANY = OUT_EDDN_SEND_STATION_DATA | OUT_MKT_TD | OUT_MKT_CSV # OUT_SYS_EDSM = 256 # Now a plugin # OUT_SYS_AUTO = 512 # Now always automatic OUT_MKT_MANUAL = 1024 OUT_EDDN_SEND_NON_STATION = 2048 OUT_EDDN_DO_NOT_DELAY = 4096 + OUT_STATION_ANY = OUT_EDDN_SEND_STATION_DATA | OUT_MKT_TD | OUT_MKT_CSV app_dir_path: pathlib.Path plugin_dir_path: pathlib.Path From 59f046ee67e39400f2107cbf737f46f0b5626c01 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 15:51:10 +0000 Subject: [PATCH 65/95] eddn: First cut of periodic retry of sending messages --- plugins/eddn.py | 52 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index ed3f4cef..95627765 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -34,6 +34,7 @@ import tkinter as tk from collections import OrderedDict from platform import system from textwrap import dedent +from threading import Lock from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Mapping, MutableMapping, Optional from typing import OrderedDict as OrderedDictT from typing import Tuple, Union @@ -66,6 +67,8 @@ class This: def __init__(self): # Track if we're on foot self.on_foot = False + # Track if we're docked + self.docked = False # Horizons ? self.horizons = False @@ -164,6 +167,10 @@ class EDDNSender: self.convert_legacy_file() ####################################################################### + self.queue_processing = Lock() + # Initiate retry/send-now timer + self.eddn.parent.after(self.eddn.REPLAYPERIOD, self.queue_check_and_send) + def sqlite_queue_v1(self) -> sqlite3.Connection: """ Initialise a v1 EDDN queue database. @@ -414,6 +421,48 @@ class EDDNSender: return False + def queue_check_and_send(self) -> None: + """Check if we should be sending queued messages, and send if we should.""" + # Mutex in case we're already processing + if not self.queue_processing.acquire(blocking=False): + return + + # We send either if docked or 'Delay sending until docked' not set + if this.docked or config.get_int('output') & config.OUT_EDDN_DO_NOT_DELAY: + # We need our own cursor here, in case the semantics of + # tk `after()` could allow this to run in the middle of other + # database usage. + db_cursor = self.db_conn.cursor() + + # Options: + # 1. Process every queued message, regardless. + # 2. Bail if we get any sort of connection error from EDDN. + + # Every queued message that is for *this* commander. We do **NOT** + # check if it's station/not-station, as the control of if a message + # was even created, versus the Settings > EDDN options, is applied + # *then*, not at time of sending. 
+ try: + db_cursor.execute( + """ + SELECT message_id FROM messages + ORDER BY created ASC + """ + ) + + except Exception: + logger.exception("DB error querying queued messages") + + else: + row = dict(zip([c[0] for c in db_cursor.description], db_cursor.fetchone())) + self.send_message_by_id(row['message_id']) + + db_cursor.close() + + # Set us up to run again after a delay + self.queue_processing.release() + self.eddn.parent.after(self.eddn.REPLAYPERIOD, self.queue_check_and_send) + def _log_response( self, response: requests.Response, @@ -851,7 +900,7 @@ class EDDN: elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent msg_id = self.sender.add_message(cmdr, msg) - if not config.get_int('output') & config.OUT_SYS_DELAY: + if config.get_int('output') & config.OUT_EDDN_DO_NOT_DELAY: # No delay in sending configured, so attempt immediately self.sender.send_message_by_id(msg_id) @@ -1925,6 +1974,7 @@ def journal_entry( # noqa: C901, CCR001 event_name = entry['event'].lower() this.on_foot = state['OnFoot'] + this.docked = state['IsDocked'] # Note if we're under Horizons and/or Odyssey # The only event these are already in is `LoadGame` which isn't sent to EDDN. From 6070f82c6b054b0d87cf759360338733dbf65451 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 16:47:17 +0000 Subject: [PATCH 66/95] OUT_EDDN_DO_NOT_DELAY should be OUT_EDDN_DELAY * The old name was OUT_SYS_DELAY. * Yes, this is the inverse of what we want, which is "should we not delay messages", but this is the legacy. --- config/__init__.py | 2 +- prefs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index 1715cd46..67eae43b 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -175,7 +175,7 @@ class AbstractConfig(abc.ABC): # OUT_SYS_AUTO = 512 # Now always automatic OUT_MKT_MANUAL = 1024 OUT_EDDN_SEND_NON_STATION = 2048 - OUT_EDDN_DO_NOT_DELAY = 4096 + OUT_EDDN_DELAY = 4096 OUT_STATION_ANY = OUT_EDDN_SEND_STATION_DATA | OUT_MKT_TD | OUT_MKT_CSV app_dir_path: pathlib.Path diff --git a/prefs.py b/prefs.py index 7be52526..376b6c67 100644 --- a/prefs.py +++ b/prefs.py @@ -1222,7 +1222,7 @@ class PreferencesDialog(tk.Toplevel): (config.OUT_MKT_MANUAL if not self.out_auto.get() else 0) + (self.out_ship.get() and config.OUT_SHIP) + (config.get_int('output') & ( - config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DO_NOT_DELAY + config.OUT_EDDN_SEND_STATION_DATA | config.OUT_EDDN_SEND_NON_STATION | config.OUT_EDDN_DELAY )) ) From 06fa3629ea806feede46d7d79d9adce652e5b6e8 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 16:49:02 +0000 Subject: [PATCH 67/95] eddn: OUT_EDDN_DELAY (not inverted) & further work on the sending * The eddn parts of the OUT_EDDN_DO_NOT_DELAY -> OUT_EDDN_DELAY change. This includes the 'sense' of it being inverted from what it was. * EDDN.REPLAY_DELAY is now a float, as it's used with `time.sleep()`. *This* is the 400ms value for inter-message cooldown. * EDDN.REPLAY_PERIOD is still an int, used with tk `after()`. This is how often we attempt the queue. * EDDN.session is no longer a thing, move that part of EDDN.close() to EDDNSender.close(). * EDDN queue DB has `id`, not `message_id`. * Now *looping* in the queue sender, not only sending the oldest message. 
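The shape of the new drain loop, with the inter-message cooldown, is roughly as
below (sketch only; the real code walks the sqlite queue and is driven from tk
`after()`):

    # "Roughly two messages per second": one short sleep between consecutive sends.
    import time

    def drain(queued_ids, send_one, cooldown_s: float = 0.400) -> None:
        for msg_id in queued_ids:
            send_one(msg_id)
            time.sleep(cooldown_s)

    drain([1, 2, 3], lambda i: print(f'sent {i}'))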
--- plugins/eddn.py | 41 +++++++++++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 95627765..15a0c61c 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -30,6 +30,7 @@ import pathlib import re import sqlite3 import sys +import time import tkinter as tk from collections import OrderedDict from platform import system @@ -169,7 +170,7 @@ class EDDNSender: self.queue_processing = Lock() # Initiate retry/send-now timer - self.eddn.parent.after(self.eddn.REPLAYPERIOD, self.queue_check_and_send) + self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send) def sqlite_queue_v1(self) -> sqlite3.Connection: """ @@ -253,6 +254,9 @@ class EDDNSender: if self.db_conn: self.db_conn.close() + logger.debug('Closing EDDN requests.Session.') + self.session.close() + def add_message(self, cmdr: str, msg: MutableMapping[str, Any]) -> int: """ Add an EDDN message to the database. @@ -423,12 +427,16 @@ class EDDNSender: def queue_check_and_send(self) -> None: """Check if we should be sending queued messages, and send if we should.""" + # logger.debug("Called") # Mutex in case we're already processing if not self.queue_processing.acquire(blocking=False): + logger.debug("Couldn't obtain mutex") return + # logger.debug("Obtained mutex") # We send either if docked or 'Delay sending until docked' not set - if this.docked or config.get_int('output') & config.OUT_EDDN_DO_NOT_DELAY: + if this.docked or not (config.get_int('output') & config.OUT_EDDN_DELAY): + # logger.debug("Should send") # We need our own cursor here, in case the semantics of # tk `after()` could allow this to run in the middle of other # database usage. @@ -445,7 +453,7 @@ class EDDNSender: try: db_cursor.execute( """ - SELECT message_id FROM messages + SELECT id FROM messages ORDER BY created ASC """ ) @@ -454,14 +462,20 @@ class EDDNSender: logger.exception("DB error querying queued messages") else: - row = dict(zip([c[0] for c in db_cursor.description], db_cursor.fetchone())) - self.send_message_by_id(row['message_id']) + while row := db_cursor.fetchone(): + row = dict(zip([c[0] for c in db_cursor.description], row)) + self.send_message_by_id(row['id']) + time.sleep(self.eddn.REPLAY_DELAY) db_cursor.close() + # else: + # logger.debug("Should NOT send") + # Set us up to run again after a delay self.queue_processing.release() - self.eddn.parent.after(self.eddn.REPLAYPERIOD, self.queue_check_and_send) + # logger.debug("Mutex released") + self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send) def _log_response( self, @@ -518,7 +532,9 @@ class EDDN: if 'eddn' in debug_senders: DEFAULT_URL = f'http://{edmc_data.DEBUG_WEBSERVER_HOST}:{edmc_data.DEBUG_WEBSERVER_PORT}/eddn' - REPLAYPERIOD = 400 # Roughly two messages per second, accounting for send delays [ms] + # FIXME: Change back to `300_000` + REPLAY_PERIOD = 1_000 # How often to try (re-)sending the queue, [milliseconds] + REPLAY_DELAY = 0.400 # Roughly two messages per second, accounting for send delays [seconds] REPLAYFLUSH = 20 # Update log on disk roughly every 10 seconds MODULE_RE = re.compile(r'^Hpt_|^Int_|Armour_', re.IGNORECASE) CANONICALISE_RE = re.compile(r'\$(.+)_name;') @@ -545,9 +561,6 @@ class EDDN: logger.debug('Done.') - logger.debug('Closing EDDN requests.Session.') - self.session.close() - def export_commodities(self, data: Mapping[str, Any], is_beta: bool) -> None: # noqa: CCR001 """ Update EDDN with the commodities on the current (lastStarport) station. 
@@ -888,7 +901,7 @@ class EDDN: # Check if the user configured messages to be sent. # # 1. If this is a 'station' data message then check config.EDDN_SEND_STATION_DATA - # 2. Else check against config.EDDN_SEND_NON_STATION *and* config.OUT_EDDN_DO_NOT_DELAY + # 2. Else check against config.EDDN_SEND_NON_STATION *and* config.OUT_EDDN_DELAY if any(f'{s}' in msg['$schemaRef'] for s in EDDNSender.STATION_SCHEMAS): # 'Station data' if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: @@ -900,7 +913,7 @@ class EDDN: elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent msg_id = self.sender.add_message(cmdr, msg) - if config.get_int('output') & config.OUT_EDDN_DO_NOT_DELAY: + if not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately self.sender.send_message_by_id(msg_id) @@ -1843,7 +1856,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: ) this.eddn_system_button.grid(padx=BUTTONX, pady=(5, 0), sticky=tk.W) - this.eddn_delay = tk.IntVar(value=(output & config.OUT_EDDN_DO_NOT_DELAY) and 1) + this.eddn_delay = tk.IntVar(value=(output & config.OUT_EDDN_DELAY) and 1) # Output setting under 'Send system and scan data to the Elite Dangerous Data Network' new in E:D 2.2 this.eddn_delay_button = nb.Checkbutton( eddnframe, @@ -1883,7 +1896,7 @@ def prefs_changed(cmdr: str, is_beta: bool) -> None: & (config.OUT_MKT_TD | config.OUT_MKT_CSV | config.OUT_SHIP | config.OUT_MKT_MANUAL)) + (this.eddn_station.get() and config.OUT_EDDN_SEND_STATION_DATA) + (this.eddn_system.get() and config.OUT_EDDN_SEND_NON_STATION) + - (this.eddn_delay.get() and config.OUT_EDDN_DO_NOT_DELAY) + (this.eddn_delay.get() and config.OUT_EDDN_DELAY) ) From 4a0518da9ffc145a9c9d936fe1638c5894b69270 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 17:26:32 +0000 Subject: [PATCH 68/95] eddn: Set header up properly for all journal messages * New function `EDDN.add_header()`. * It utilises new `this` members, set from `journal_entry()`. --- plugins/eddn.py | 37 ++++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 15a0c61c..2dc30c1d 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -66,6 +66,12 @@ class This: """Holds module globals.""" def __init__(self): + # Game version and build + self.game_version = "" + self.game_build = "" + # Commander Name + self.cmdr_name = "" + # Track if we're on foot self.on_foot = False # Track if we're docked @@ -906,17 +912,43 @@ class EDDN: # 'Station data' if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: # And user has 'station data' configured to be sent + msg = self.add_header(msg) msg_id = self.sender.add_message(cmdr, msg) # 'Station data' is never delayed on construction of message self.sender.send_message_by_id(msg_id) elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent + msg = self.add_header(msg) msg_id = self.sender.add_message(cmdr, msg) if not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately self.sender.send_message_by_id(msg_id) + def add_header(self, msg: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + """ + Augment the given EDDN message with our header defaults. 
+ + NB: This should *only* be called for newly constructed messages, not + for either a legacy message or an already queued one! + + :param msg: Message to be augmented + :return: The augmented version + """ + if 'header' in msg: + logger.error("Passed `msg` which already has a header") + return msg + + msg['header'] = { + 'softwareName': f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]', + 'softwareVersion': str(appversion_nobuild()), + 'uploaderID': this.cmdr_name, + 'gameversion': this.game_version, + 'gamebuild': this.game_build, + } + + return msg + def export_journal_generic(self, cmdr: str, is_beta: bool, entry: Mapping[str, Any]) -> None: """ Send an EDDN event on the journal schema. @@ -1966,7 +1998,7 @@ def journal_entry( # noqa: C901, CCR001 """ Process a new Journal entry. - :param cmdr: `str` - Name of currennt Cmdr. + :param cmdr: `str` - Name of current Cmdr. :param is_beta: `bool` - True if this is a beta version of the Game. :param system: `str` - Name of system Cmdr is in. :param station: `str` - Name of station Cmdr is docked at, if applicable. @@ -1986,6 +2018,9 @@ def journal_entry( # noqa: C901, CCR001 entry = new_data event_name = entry['event'].lower() + this.cmdr_name = cmdr + this.game_version = state['GameVersion'] + this.game_build = state['GameBuild'] this.on_foot = state['OnFoot'] this.docked = state['IsDocked'] From b31c8c05362cba2839bc964d386126662b70b2ee Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 17:35:17 +0000 Subject: [PATCH 69/95] eddn: A start on ensuring header (gameversion/build) are always set --- plugins/eddn.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 2dc30c1d..4204b226 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -912,43 +912,38 @@ class EDDN: # 'Station data' if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: # And user has 'station data' configured to be sent - msg = self.add_header(msg) + msg['header'] = self.standard_header() msg_id = self.sender.add_message(cmdr, msg) # 'Station data' is never delayed on construction of message self.sender.send_message_by_id(msg_id) elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent - msg = self.add_header(msg) + msg['header'] = self.standard_header() msg_id = self.sender.add_message(cmdr, msg) if not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately self.sender.send_message_by_id(msg_id) - def add_header(self, msg: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + def standard_header( + self, game_version: Optional[str] = None, game_build: Optional[str] = None + ) -> MutableMapping[str, Any]: """ - Augment the given EDDN message with our header defaults. + Return the standard header for an EDDN message, given tracked state. NB: This should *only* be called for newly constructed messages, not for either a legacy message or an already queued one! 
- :param msg: Message to be augmented - :return: The augmented version + :return: The standard header """ - if 'header' in msg: - logger.error("Passed `msg` which already has a header") - return msg - - msg['header'] = { + return { 'softwareName': f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]', 'softwareVersion': str(appversion_nobuild()), 'uploaderID': this.cmdr_name, - 'gameversion': this.game_version, - 'gamebuild': this.game_build, + 'gameversion': game_version or this.game_version, + 'gamebuild': game_build or this.game_build, } - return msg - def export_journal_generic(self, cmdr: str, is_beta: bool, entry: Mapping[str, Any]) -> None: """ Send an EDDN event on the journal schema. @@ -1457,7 +1452,8 @@ class EDDN: msg = { '$schemaRef': f'https://eddn.edcd.io/schemas/fcmaterials_capi/1{"/test" if is_beta else ""}', - 'message': entry + 'message': entry, + 'header': self.standard_header(game_version='CAPI-commodity', game_build='CAPI-commodity'), } this.eddn.send_message(data['commander']['name'], msg) From f6e25042165789808fe76b7f89bddde7ef53f730 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 17:37:15 +0000 Subject: [PATCH 70/95] eddn: Fix capi_fcmaterials gameversion/build & add to CAPI commodity * Erroneously used 'CAPI-commoodity' when it's 'CAPI-market' (name of the CAPI endpoint, not anything to do with EDDN schema names, and '-commodity' would also be wrong for that). * Set `header` for (CAPI) `export_commodities()`. --- plugins/eddn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 4204b226..2a129462 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -635,6 +635,7 @@ class EDDN: self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/commodity/3{"/test" if is_beta else ""}', 'message': message, + 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market') }) this.commodities = commodities @@ -1453,7 +1454,7 @@ class EDDN: msg = { '$schemaRef': f'https://eddn.edcd.io/schemas/fcmaterials_capi/1{"/test" if is_beta else ""}', 'message': entry, - 'header': self.standard_header(game_version='CAPI-commodity', game_build='CAPI-commodity'), + 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market'), } this.eddn.send_message(data['commander']['name'], msg) From f2dbfacf70071632013910519ff7374ea42f67d8 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Tue, 22 Nov 2022 17:40:33 +0000 Subject: [PATCH 71/95] eddn: Add header for CAPI outfitting and shipyard exports --- plugins/eddn.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 2a129462..1bd6e834 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -635,7 +635,7 @@ class EDDN: self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/commodity/3{"/test" if is_beta else ""}', 'message': message, - 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market') + 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market'), }) this.commodities = commodities @@ -729,6 +729,7 @@ class EDDN: ('modules', outfitting), ('odyssey', this.odyssey), ]), + 'header': self.standard_header(game_version='CAPI-shipyard', game_build='CAPI-shipyard'), }) this.outfitting = (horizons, outfitting) @@ -773,6 +774,7 @@ class EDDN: ('ships', shipyard), ('odyssey', this.odyssey), ]), + 'header': 
self.standard_header(game_version='CAPI-shipyard', game_build='CAPI-shipyard'), }) this.shipyard = (horizons, shipyard) From fda91df04fa0c7d68a4a1c7f09ec5171acd49540 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 13:29:47 +0000 Subject: [PATCH 72/95] eddn: Working with tk `after()`, on timer or when docked * An aborted attempt was made to use a thread worker, but: 1. sqlite3 doesn't allow cross-thread use of the same sqlite3 connection. 2. Having an on-going query on one cursor, e.g. gathering all the outstanding message `id`, whilst trying to DELETE a row hits a "database is locked" error. * So, back to tk `after()`. `send_message_by_id()` has been audited to ensure its boolean return is accurate. So there shouldn't be any way in which to get hung up on a single message *other than if the EDDN Gateway is having issues, and thus it should be retried anyway*. Any reason for a 'bad message' will cause `True` return and thus deletion of the message in *this* call to `queue_check_and_send()`. * There is a new `reschedule` parameter to `queue_check_and_send()`. If `True` then at the end it should re-schedule. There is a check in `journal_entry()` for the `Docked` event, and if this occurs it will schedule `queue_check_and_send()` with `reschedule` set to `False` so that we don't end up with multiple parallel schedulings. It's still possible for a docking to have coincided with a scheduled run and thus cause double-rate sending to EDDN, but we can live with that. * The same scheduling mechanism is used, with a much smaller delay, to process more than one queued message per run. Hence the `have_rescheduled` bool *in* the function to indicate if a 'fast' reschedule has already been set. This prevents the slow one *also* being set in this scenario. The latter will be scheduled when the fast one found no more rows to process. --- plugins/eddn.py | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 1bd6e834..2158c6de 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -176,7 +176,7 @@ class EDDNSender: self.queue_processing = Lock() # Initiate retry/send-now timer - self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send) + self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, True) def sqlite_queue_v1(self) -> sqlite3.Connection: """ @@ -431,15 +431,24 @@ class EDDNSender: return False - def queue_check_and_send(self) -> None: - """Check if we should be sending queued messages, and send if we should.""" - # logger.debug("Called") + def queue_check_and_send(self, reschedule: bool = False) -> None: + """ + Check if we should be sending queued messages, and send if we should. + + :param reschedule: Boolean indicating if we should call `after()` again. + """ + logger.debug("Called") # Mutex in case we're already processing if not self.queue_processing.acquire(blocking=False): logger.debug("Couldn't obtain mutex") + if reschedule: + self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, reschedule) + return # logger.debug("Obtained mutex") + # Used to indicate if we've rescheduled at the faster rate already. 
+ have_rescheduled = False # We send either if docked or 'Delay sending until docked' not set if this.docked or not (config.get_int('output') & config.OUT_EDDN_DELAY): # logger.debug("Should send") @@ -461,6 +470,7 @@ class EDDNSender: """ SELECT id FROM messages ORDER BY created ASC + LIMIT 1 """ ) @@ -468,20 +478,24 @@ class EDDNSender: logger.exception("DB error querying queued messages") else: - while row := db_cursor.fetchone(): + row = db_cursor.fetchone() + if row: row = dict(zip([c[0] for c in db_cursor.description], row)) self.send_message_by_id(row['id']) - time.sleep(self.eddn.REPLAY_DELAY) + # Always re-schedule as this is only a "Don't hammer EDDN" delay + self.eddn.parent.after(self.eddn.REPLAY_DELAY, self.queue_check_and_send, reschedule) + have_rescheduled = True - db_cursor.close() + db_cursor.close() # else: # logger.debug("Should NOT send") - # Set us up to run again after a delay self.queue_processing.release() # logger.debug("Mutex released") - self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send) + if reschedule and not have_rescheduled: + # Set us up to run again per the configured period + self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, reschedule) def _log_response( self, @@ -539,8 +553,8 @@ class EDDN: DEFAULT_URL = f'http://{edmc_data.DEBUG_WEBSERVER_HOST}:{edmc_data.DEBUG_WEBSERVER_PORT}/eddn' # FIXME: Change back to `300_000` - REPLAY_PERIOD = 1_000 # How often to try (re-)sending the queue, [milliseconds] - REPLAY_DELAY = 0.400 # Roughly two messages per second, accounting for send delays [seconds] + REPLAY_PERIOD = 300_000 # How often to try (re-)sending the queue, [milliseconds] + REPLAY_DELAY = 400 # Roughly two messages per second, accounting for send delays [milliseconds] REPLAYFLUSH = 20 # Update log on disk roughly every 10 seconds MODULE_RE = re.compile(r'^Hpt_|^Int_|Armour_', re.IGNORECASE) CANONICALISE_RE = re.compile(r'\$(.+)_name;') @@ -2081,6 +2095,9 @@ def journal_entry( # noqa: C901, CCR001 # Yes, explicitly state `None` here, so it's crystal clear. this.systemaddress = entry.get('SystemAddress', None) # type: ignore + if event_name == 'docked': + this.eddn.parent.after(this.eddn.REPLAY_DELAY, this.eddn.sender.queue_check_and_send, False) + elif event_name == 'approachbody': this.body_name = entry['Body'] this.body_id = entry.get('BodyID') From 524c0425fb784bf04bb5cad8a7d76cee2277a7a1 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 13:46:49 +0000 Subject: [PATCH 73/95] eddn: EDDN.send_message(): Only set standard header if not already set. Else we risk overwriting e.g. `CAPI-shipyard`. 
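An equivalent way to express "only fill in the default header if the caller didn't
supply one" is `dict.setdefault` (sketch; the patch itself keeps the explicit
`if 'header' not in msg` form):

    # setdefault() is a no-op when the key already exists, so a CAPI-supplied header survives.
    def with_default_header(msg: dict, default_header: dict) -> dict:
        msg.setdefault('header', default_header)
        return msg

    capi_msg = {'$schemaRef': '...', 'header': {'gameversion': 'CAPI-shipyard'}}
    print(with_default_header(capi_msg, {'gameversion': 'Journal'})['header'])
    # -> {'gameversion': 'CAPI-shipyard'}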
--- plugins/eddn.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 2158c6de..d201e291 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -30,7 +30,6 @@ import pathlib import re import sqlite3 import sys -import time import tkinter as tk from collections import OrderedDict from platform import system @@ -929,14 +928,18 @@ class EDDN: # 'Station data' if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: # And user has 'station data' configured to be sent - msg['header'] = self.standard_header() + if 'header' not in msg: + msg['header'] = self.standard_header() + msg_id = self.sender.add_message(cmdr, msg) # 'Station data' is never delayed on construction of message self.sender.send_message_by_id(msg_id) elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent - msg['header'] = self.standard_header() + if 'header' not in msg: + msg['header'] = self.standard_header() + msg_id = self.sender.add_message(cmdr, msg) if not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately From b7769821bede7db3e6a79971917c9dc0baa54a96 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 13:56:01 +0000 Subject: [PATCH 74/95] eddn: Send immediately if docked This has the "are we delaying sending?" check, but without the short-circuit for "we are actually docked right now". --- plugins/eddn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index d201e291..db6dbf96 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -941,7 +941,7 @@ class EDDN: msg['header'] = self.standard_header() msg_id = self.sender.add_message(cmdr, msg) - if not (config.get_int('output') & config.OUT_EDDN_DELAY): + if this.docked or not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately self.sender.send_message_by_id(msg_id) From 3d9bb643300bf8d8684138414ddd548abdf0c950 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 14:12:43 +0000 Subject: [PATCH 75/95] eddn: Remove outdated comment about replaylog rework And using tk `parent` *is* absolutely necessary. --- plugins/eddn.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index db6dbf96..da519cc4 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -1797,10 +1797,7 @@ def plugin_app(parent: tk.Tk) -> Optional[tk.Frame]: Set up any plugin-specific UI. In this case we need the tkinter parent in order to later call - `update_idletasks()` on it. - - TODO: Re-work the whole replaylog and general sending to EDDN so this isn't - necessary. + `update_idletasks()` on it, or schedule things with `after()`. :param parent: tkinter parent frame. :return: Optional tk.Frame, if the tracking UI is active. 
From 95fa9d577c31494bb4910504f62ddb50b616c45c Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 14:20:24 +0000 Subject: [PATCH 76/95] eddn: Use a shorter, 10 second, delay at startup for first queue check --- plugins/eddn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index da519cc4..cc94ff1a 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -175,7 +175,7 @@ class EDDNSender: self.queue_processing = Lock() # Initiate retry/send-now timer - self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, True) + self.eddn.parent.after(self.eddn.REPLAY_STARTUP_DELAY, self.queue_check_and_send, True) def sqlite_queue_v1(self) -> sqlite3.Connection: """ @@ -552,6 +552,7 @@ class EDDN: DEFAULT_URL = f'http://{edmc_data.DEBUG_WEBSERVER_HOST}:{edmc_data.DEBUG_WEBSERVER_PORT}/eddn' # FIXME: Change back to `300_000` + REPLAY_STARTUP_DELAY = 10_000 # Delay during startup before checking queue [milliseconds] REPLAY_PERIOD = 300_000 # How often to try (re-)sending the queue, [milliseconds] REPLAY_DELAY = 400 # Roughly two messages per second, accounting for send delays [milliseconds] REPLAYFLUSH = 20 # Update log on disk roughly every 10 seconds From 0eb33e011b80997c1df6759b79f97e3fef3d27dd Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 15:57:25 +0000 Subject: [PATCH 77/95] PLUGINS.md: Document new `IsDocked` state flag. --- PLUGINS.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/PLUGINS.md b/PLUGINS.md index 1649b47e..e8d47a3e 100644 --- a/PLUGINS.md +++ b/PLUGINS.md @@ -617,6 +617,7 @@ Content of `state` (updated to the current journal entry): | `Modules` | `dict` | Currently fitted modules | | `NavRoute` | `dict` | Last plotted multi-hop route | | `ModuleInfo` | `dict` | Last loaded ModulesInfo.json data | +| `IsDocked` | `bool` | Whether the Cmdr is currently docked *in their own ship*. | | `OnFoot` | `bool` | Whether the Cmdr is on foot | | `Component` | `dict` | 'Component' MicroResources in Odyssey, `int` count each. | | `Item` | `dict` | 'Item' MicroResources in Odyssey, `int` count each. | @@ -710,6 +711,17 @@ NB: It *is* possible, if a player is quick enough, to plot and clear a route before we load it, in which case we'd be retaining the *previous* plotted route. +New in version 5.6.0: + +`IsDocked` boolean added to `state`. This is set True for a `Location` event +having `"Docked":true"`, or the `Docked` event. It is set back to False (its +default value) for an `Undocked` event. Being on-foot in a station at login +time does *not* count as docked for this. + +In general on-foot, including being in a taxi, might not set this 100% +correctly. Its main use in core code is to detect being docked so as to send +any stored EDDN messages due to "Delay sending until docked" option. + ___ ##### Synthetic Events From 32229217b2c5676ae8c6530fcf1bc026131705dc Mon Sep 17 00:00:00 2001 From: Athanasius Date: Wed, 23 Nov 2022 16:00:28 +0000 Subject: [PATCH 78/95] eddn: Bail from sending queued messages if one failed NB: This is failed *not* due to the message being 'bad' in some manner. It will mean the Gateway timed out, refused connection etc. --- plugins/eddn.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index cc94ff1a..805f45cf 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -364,6 +364,8 @@ class EDDNSender: options to not send to EDDN, or to delay the sending until docked, are checked. 
+ It *is* however the one 'sending' place that the EDDN killswitches are checked. + Should catch and handle all failure conditions. A `True` return might mean that the message was successfully sent, *or* that this message should not be retried after a failure, i.e. too large. @@ -480,10 +482,14 @@ class EDDNSender: row = db_cursor.fetchone() if row: row = dict(zip([c[0] for c in db_cursor.description], row)) - self.send_message_by_id(row['id']) - # Always re-schedule as this is only a "Don't hammer EDDN" delay - self.eddn.parent.after(self.eddn.REPLAY_DELAY, self.queue_check_and_send, reschedule) - have_rescheduled = True + if self.send_message_by_id(row['id']): + # If `True` was returned then we're done with this message. + # `False` means "failed to send, but not because the message + # is bad", i.e. an EDDN Gateway problem. Thus, in that case + # we do *NOT* schedule attempting the next message. + # Always re-schedule as this is only a "Don't hammer EDDN" delay + self.eddn.parent.after(self.eddn.REPLAY_DELAY, self.queue_check_and_send, reschedule) + have_rescheduled = True db_cursor.close() From c8e00304fba503142d18f451882ce0ecbf25570d Mon Sep 17 00:00:00 2001 From: Philipp Trulson Date: Thu, 24 Nov 2022 14:09:43 +0100 Subject: [PATCH 79/95] Fix URL for EDDN GitHub Link --- plugins/eddn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index cd1d53ff..c0b80697 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -1697,7 +1697,7 @@ def plugin_prefs(parent, cmdr: str, is_beta: bool) -> Frame: eddnframe, text='Elite Dangerous Data Network', background=nb.Label().cget('background'), - url='https://github.com/EDSM-NET/EDDN/wiki', + url='https://github.com/EDCD/EDDN#eddn---elite-dangerous-data-network', underline=True ).grid(padx=PADX, sticky=tk.W) # Don't translate From 073afc842fb6a3d0d7e22b374d8396d4c921bbee Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 24 Nov 2022 14:09:57 +0000 Subject: [PATCH 80/95] eddn: New `--trace-on plugins.eddn.send` & other logging tweaks * In case of apparent issues, have a `--trace-on` to better see what's (not) happening. All the old DBEUG logging, even if commented out, is now under this. * Also added some INFO level logging for the legacy replay.jsonl conversion, as it should be one-time per user. * Some additional DEBUG logging for closing down. 
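The pattern used throughout the diff below is `logger.trace_if(<channel>, <message>)`,
which only emits when the named channel has been enabled via `--trace-on`.  A
simplified stand-in for the idea (not EDMC's real helper, just the "no-op unless the
channel is enabled" shape):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    enabled_channels = {'plugin.eddn.send'}   # in EDMC this comes from --trace-on

    def trace_if(logger: logging.Logger, channel: str, msg: str) -> None:
        if channel in enabled_channels:       # otherwise: silently do nothing
            logger.debug('[%s] %s', channel, msg)

    trace_if(logging.getLogger(__name__), 'plugin.eddn.send', 'Sending message')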
--- plugins/eddn.py | 42 ++++++++++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 805f45cf..366f29f3 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -175,6 +175,10 @@ class EDDNSender: self.queue_processing = Lock() # Initiate retry/send-now timer + logger.trace_if( + "plugin.eddn.send", + f"First queue run scheduled for {self.eddn.REPLAY_STARTUP_DELAY}ms from now" + ) self.eddn.parent.after(self.eddn.REPLAY_STARTUP_DELAY, self.queue_check_and_send, True) def sqlite_queue_v1(self) -> sqlite3.Connection: @@ -227,6 +231,9 @@ class EDDNSender: db_conn.close() raise e + else: + logger.info("New `eddn_queue-v1.db` created") + # We return only the connection, so tidy up db.close() @@ -237,6 +244,7 @@ class EDDNSender: filename = config.app_dir_path / 'replay.jsonl' try: with open(filename, 'r+', buffering=1) as replay_file: + logger.info("Converting legacy `replay.jsonl` to `eddn_queue-v1.db`") for line in replay_file: cmdr, msg = json.loads(line) self.add_message(cmdr, msg) @@ -247,15 +255,18 @@ class EDDNSender: finally: # Best effort at removing the file/contents # NB: The legacy code assumed it could write to the file. + logger.info("Converson` to `eddn_queue-v1.db` complete, removing `replay.jsonl`") replay_file = open(filename, 'w') # Will truncate replay_file.close() os.unlink(filename) def close(self) -> None: """Clean up any resources.""" + logger.debug('Closing db cursor.') if self.db: self.db.close() + logger.debug('Closing db connection.') if self.db_conn: self.db_conn.close() @@ -277,6 +288,7 @@ class EDDNSender: :param msg: The full, transmission-ready, EDDN message. :return: ID of the successfully inserted row. """ + logger.trace_if("plugin.eddn.send", f"Message for {msg['$schemaRef']=}") # Cater for legacy replay.json messages if 'header' not in msg: msg['header'] = { @@ -315,6 +327,7 @@ class EDDNSender: # Can't possibly be a valid row id return -1 + logger.trace_if("plugin.eddn.send", f"Message for {msg['$schemaRef']=} recorded, id={self.db.lastrowid}") return self.db.lastrowid or -1 def delete_message(self, row_id: int) -> None: @@ -323,6 +336,7 @@ class EDDNSender: :param row_id: id of message to be deleted. """ + logger.trace_if("plugin.eddn.send", f"Deleting message with {row_id=}") self.db.execute( """ DELETE FROM messages WHERE id = :row_id @@ -338,6 +352,7 @@ class EDDNSender: :param id: :return: """ + logger.trace_if("plugin.eddn.send", f"Sending message with {id=}") self.db.execute( """ SELECT * FROM messages WHERE id = :row_id @@ -373,6 +388,7 @@ class EDDNSender: :param msg: Fully formed, string, message. :return: `True` for "now remove this message from the queue" """ + logger.trace_if("plugin.eddn.send", "Sending message") should_return, new_data = killswitch.check_killswitch('plugins.eddn.send', json.loads(msg)) if should_return: logger.warning('eddn.send has been disabled via killswitch. Returning.') @@ -432,27 +448,31 @@ class EDDNSender: return False - def queue_check_and_send(self, reschedule: bool = False) -> None: + def queue_check_and_send(self, reschedule: bool = False) -> None: # noqa: CCR001 """ Check if we should be sending queued messages, and send if we should. :param reschedule: Boolean indicating if we should call `after()` again. 
""" - logger.debug("Called") + logger.trace_if("plugin.eddn.send", "Called") # Mutex in case we're already processing if not self.queue_processing.acquire(blocking=False): - logger.debug("Couldn't obtain mutex") + logger.trace_if("plugin.eddn.send", "Couldn't obtain mutex") if reschedule: + logger.trace_if("plugin.eddn.send", f"Next run scheduled for {self.eddn.REPLAY_PERIOD}ms from now") self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, reschedule) + else: + logger.trace_if("plugin.eddn.send", "NO next run scheduled (there should be another one already set)") + return - # logger.debug("Obtained mutex") + logger.trace_if("plugin.eddn.send", "Obtained mutex") # Used to indicate if we've rescheduled at the faster rate already. have_rescheduled = False # We send either if docked or 'Delay sending until docked' not set if this.docked or not (config.get_int('output') & config.OUT_EDDN_DELAY): - # logger.debug("Should send") + logger.trace_if("plugin.eddn.send", "Should send") # We need our own cursor here, in case the semantics of # tk `after()` could allow this to run in the middle of other # database usage. @@ -488,18 +508,21 @@ class EDDNSender: # is bad", i.e. an EDDN Gateway problem. Thus, in that case # we do *NOT* schedule attempting the next message. # Always re-schedule as this is only a "Don't hammer EDDN" delay + logger.trace_if("plugin.eddn.send", f"Next run scheduled for {self.eddn.REPLAY_DELAY}ms from " + "now") self.eddn.parent.after(self.eddn.REPLAY_DELAY, self.queue_check_and_send, reschedule) have_rescheduled = True db_cursor.close() - # else: - # logger.debug("Should NOT send") + else: + logger.trace_if("plugin.eddn.send", "Should NOT send") self.queue_processing.release() - # logger.debug("Mutex released") + logger.trace_if("plugin.eddn.send", "Mutex released") if reschedule and not have_rescheduled: # Set us up to run again per the configured period + logger.trace_if("plugin.eddn.send", f"Next run scheduled for {self.eddn.REPLAY_PERIOD}ms from now") self.eddn.parent.after(self.eddn.REPLAY_PERIOD, self.queue_check_and_send, reschedule) def _log_response( @@ -935,6 +958,7 @@ class EDDN: # 'Station data' if config.get_int('output') & config.OUT_EDDN_SEND_STATION_DATA: # And user has 'station data' configured to be sent + logger.trace_if("plugin.eddn.send", "Recording/sending 'station' message") if 'header' not in msg: msg['header'] = self.standard_header() @@ -944,12 +968,14 @@ class EDDN: elif config.get_int('output') & config.OUT_EDDN_SEND_NON_STATION: # Any data that isn't 'station' is configured to be sent + logger.trace_if("plugin.eddn.send", "Recording 'non-station' message") if 'header' not in msg: msg['header'] = self.standard_header() msg_id = self.sender.add_message(cmdr, msg) if this.docked or not (config.get_int('output') & config.OUT_EDDN_DELAY): # No delay in sending configured, so attempt immediately + logger.trace_if("plugin.eddn.send", "Sending 'non-station' message") self.sender.send_message_by_id(msg_id) def standard_header( From 4d29eb605943080edcc4f89ebf79b9a8d150b975 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Thu, 24 Nov 2022 14:42:34 +0000 Subject: [PATCH 81/95] appversion: Change to 5.6.0-alpha0 for 'new EDDN code' testing * There's a Pre-Release 5.5.1-alpha0 out there, we want to look distinct from it. 
--- config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/__init__.py b/config/__init__.py index 67eae43b..70d0603e 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -52,7 +52,7 @@ appcmdname = 'EDMC' # # Major.Minor.Patch(-prerelease)(+buildmetadata) # NB: Do *not* import this, use the functions appversion() and appversion_nobuild() -_static_appversion = '5.5.1-alpha0' +_static_appversion = '5.6.0-alpha0' _cached_version: Optional[semantic_version.Version] = None copyright = '© 2015-2019 Jonathan Harris, 2020-2022 EDCD' From 345938a81d8cddcd5d6e232bf316a65a1b685644 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Fri, 25 Nov 2022 15:45:07 +0000 Subject: [PATCH 82/95] eddn: Set `gamebuild=""` for CAPI-sourced data Whilst setting it to the same "CAPI-" string as `gameversion` in these cases would probably be OK, that's not the intent of the EDDN documentation, which has now been clarified. --- plugins/eddn.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 366f29f3..ad58d641 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -678,7 +678,7 @@ class EDDN: self.send_message(data['commander']['name'], { '$schemaRef': f'https://eddn.edcd.io/schemas/commodity/3{"/test" if is_beta else ""}', 'message': message, - 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market'), + 'header': self.standard_header(game_version='CAPI-market', game_build=''), }) this.commodities = commodities @@ -772,7 +772,7 @@ class EDDN: ('modules', outfitting), ('odyssey', this.odyssey), ]), - 'header': self.standard_header(game_version='CAPI-shipyard', game_build='CAPI-shipyard'), + 'header': self.standard_header(game_version='CAPI-shipyard', game_build=''), }) this.outfitting = (horizons, outfitting) @@ -817,7 +817,7 @@ class EDDN: ('ships', shipyard), ('odyssey', this.odyssey), ]), - 'header': self.standard_header(game_version='CAPI-shipyard', game_build='CAPI-shipyard'), + 'header': self.standard_header(game_version='CAPI-shipyard', game_build=''), }) this.shipyard = (horizons, shipyard) @@ -1506,7 +1506,7 @@ class EDDN: msg = { '$schemaRef': f'https://eddn.edcd.io/schemas/fcmaterials_capi/1{"/test" if is_beta else ""}', 'message': entry, - 'header': self.standard_header(game_version='CAPI-market', game_build='CAPI-market'), + 'header': self.standard_header(game_version='CAPI-market', game_build=''), } this.eddn.send_message(data['commander']['name'], msg) From 8a58220a661b8b5eafb26aaac5e56833c48735cc Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 12:47:19 +0000 Subject: [PATCH 83/95] inara: Send only Live galaxy data after Update 14 * Adds `monitor.is_live_galaxy()` for general use. * Assumes Update 14 starts after 2022-11-29T09:00:00+00:00. That's the currently schedule day, and recently the servers have been down by the time. Likelihood of them coming back *up* quickly seems slim to none. * If we couldn't parse the `gameversion` from Journal using `semantic_version.Version.coerce()` this will fail, and assume we're on the Legacy galaxy. 
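As a minimal, self-contained sketch of the version comparison this relies on (this is not the patch's own code; the version strings below are illustrative stand-ins for a Live 4.0.x client and a Legacy 3.8.x client, not values taken from this patch):

    import semantic_version

    LIVE_GALAXY_BASE = semantic_version.Version('4.0.0')

    def looks_like_live(gameversion: str) -> bool:
        # coerce() folds a fourth numeric component into build metadata,
        # e.g. '4.0.0.1502' parses as '4.0.0+1502', which still satisfies >= 4.0.0.
        # It raises ValueError if no numeric component can be found at all.
        parsed = semantic_version.Version.coerce(gameversion)
        return parsed >= LIVE_GALAXY_BASE

    print(looks_like_live('4.0.0.1502'))  # assumed Live-style string   -> True
    print(looks_like_live('3.8.0.404'))   # assumed Legacy-style string -> False

If coerce() raises, the caller treats the client as Legacy, per the last bullet above.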
--- monitor.py | 29 +++++++++++++++++++++++++++++ plugins/inara.py | 26 ++++++++++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/monitor.py b/monitor.py index 859b2a50..f2a68b95 100644 --- a/monitor.py +++ b/monitor.py @@ -21,6 +21,8 @@ from typing import Tuple if TYPE_CHECKING: import tkinter +import semantic_version + import util_ships from config import config from edmc_data import edmc_suit_shortnames, edmc_suit_symbol_localised @@ -111,6 +113,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below # Context for journal handling self.version: Optional[str] = None + self.version_semantic: Optional[semantic_version.Version] = None self.is_beta = False self.mode: Optional[str] = None self.group: Optional[str] = None @@ -131,6 +134,11 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self._fcmaterials_retries_remaining = 0 self._last_fcmaterials_journal_timestamp: Optional[float] = None + # For determining Live versus Legacy galaxy. + # The assumption is gameversion will parse via `coerce()` and always + # be >= for Live, and < for Legacy. + self.live_galaxy_base_version = semantic_version.Version('4.0.0') + self.__init_state() def __init_state(self) -> None: @@ -293,6 +301,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.currentdir = None self.version = None + self.version_semantic = None self.mode = None self.group = None self.cmdr = None @@ -1677,6 +1686,7 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.state['GameVersion'] = entry['gameversion'] self.state['GameBuild'] = entry['build'] self.version = self.state['GameVersion'] + self.version_semantic = semantic_version.Version.coerce(self.state['GameVersion']) self.is_beta = any(v in self.version.lower() for v in ('alpha', 'beta')) # type: ignore except KeyError: if not suppress: @@ -2348,6 +2358,25 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self._last_fcmaterials_journal_timestamp = None return file + def is_live_galaxy(self) -> bool: + """ + Indicate if current tracking indicates Live galaxy. + + We assume: + 1) `gameversion` remains something that semantic_verison.Version.coerce() can parse. + 2) Any Live galaxy client reports a version >= the defined base version. + 3) Any Legacy client will always report a version < that base version. + :return: True for Live, False for Legacy or unknown. 
+ """ + # If we don't yet know the version we can't tell, so assume the worst + if self.version_semantic is None: + return False + + if self.version_semantic >= self.live_galaxy_base_version: + return True + + return False + # singleton monitor = EDLogs() diff --git a/plugins/inara.py b/plugins/inara.py index 5c09ec27..1a7cf1f6 100644 --- a/plugins/inara.py +++ b/plugins/inara.py @@ -28,6 +28,7 @@ import time import tkinter as tk from collections import OrderedDict, defaultdict, deque from dataclasses import dataclass +from datetime import datetime, timedelta, timezone from operator import itemgetter from threading import Lock, Thread from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, List, Mapping, NamedTuple, Optional @@ -44,6 +45,7 @@ import timeout_session from companion import CAPIData from config import applongname, appversion, config, debug_senders from EDMCLogging import get_main_logger +from monitor import monitor from ttkHyperlinkLabel import HyperlinkLabel logger = get_main_logger() @@ -88,6 +90,11 @@ class This: def __init__(self): self.session = timeout_session.new_session() self.thread: Thread + self.parent: tk.Tk + + # Handle only sending Live galaxy data + self.legacy_galaxy_last_notified: Optional[datetime] = None + self.lastlocation = None # eventData from the last Commander's Flight Log event self.lastship = None # eventData from the last addCommanderShip or setCommanderShip event @@ -210,6 +217,7 @@ def plugin_start3(plugin_dir: str) -> str: def plugin_app(parent: tk.Tk) -> None: """Plugin UI setup Hook.""" + this.parent = parent this.system_link = parent.children['system'] # system label in main window this.station_link = parent.children['station'] # station label in main window this.system_link.bind_all('<>', update_location) @@ -361,6 +369,24 @@ def journal_entry( # noqa: C901, CCR001 :return: str - empty if no error, else error string. """ + if not monitor.is_live_galaxy(): + # This only applies after Update 14, which as of 2022-11-27 is scheduled + # for 2022-11-29, with the game servers presumably being down around + # 09:00 + if datetime.now(timezone.utc) >= datetime.fromisoformat("2022-11-27T09:00:00+00:00"): + # Update 14 ETA has passed, so perform the check + if ( + this.legacy_galaxy_last_notified is None + or (datetime.now(timezone.utc) - this.legacy_galaxy_last_notified) > timedelta(seconds=300) + ): + # LANG: The Inara API only accepts Live galaxy data, not Legacy galaxy data + logger.info(_("Inara only accepts Live galaxy data")) + # this.parent.children['status']['text'] = + this.legacy_galaxy_last_notified = datetime.now(timezone.utc) + return _("Inara only accepts Live galaxy data") + + return '' + should_return, new_entry = killswitch.check_killswitch('plugins.inara.journal', entry, logger) if should_return: plug.show_error(_('Inara disabled. See Log.')) # LANG: INARA support disabled via killswitch From b3eff6ab32b2a75e7b074f4e2c26bc65f87335fd Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 13:02:54 +0000 Subject: [PATCH 84/95] monitor: Catch any failure to coerce `gameversion` Also, log either the error or the success. That will aid us in diagnosing any future issues. 
--- monitor.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/monitor.py b/monitor.py index f2a68b95..52a53c51 100644 --- a/monitor.py +++ b/monitor.py @@ -1686,7 +1686,20 @@ class EDLogs(FileSystemEventHandler): # type: ignore # See below self.state['GameVersion'] = entry['gameversion'] self.state['GameBuild'] = entry['build'] self.version = self.state['GameVersion'] - self.version_semantic = semantic_version.Version.coerce(self.state['GameVersion']) + + try: + self.version_semantic = semantic_version.Version.coerce(self.state['GameVersion']) + + except Exception: + # Catching all Exceptions as this is *one* call, and we won't + # get caught out by any semantic_version changes. + self.version_semantic = None + logger.error(f"Couldn't coerce {self.state['GameVersion']=}") + pass + + else: + logger.info(f"Parsed {self.state['GameVersion']=} into {self.version_semantic=}") + self.is_beta = any(v in self.version.lower() for v in ('alpha', 'beta')) # type: ignore except KeyError: if not suppress: From 4858ec72005225b9b09fe3710c68f1ddec5d0354 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 16:09:06 +0000 Subject: [PATCH 85/95] Pre-Release 5.6.0-beta1: appversion & changelog --- ChangeLog.md | 72 +++++++++++++++++++++++++++++++++++++++++++++- config/__init__.py | 2 +- 2 files changed, 72 insertions(+), 2 deletions(-) diff --git a/ChangeLog.md b/ChangeLog.md index 4c04a484..1e31f7e8 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -27,9 +27,79 @@ produce the Windows executables and installer. --- -Pre-Release 5.5.1-alpha0 +Pre-Release 5.6.0-beta1 === * We now test against, and package with, Python 3.10.8. +* The code for sending data to EDDN has been reworked. This changes the + 'replay log' from utilising an internal array, backed by a flat file + (`replay.jsonl`), to an sqlite3 database. + + As a result: + 1. Any messages stored in the old `replay.jsonl` are converted at startup, + if that file is present, and then the file removed. + 2. All new messages are stored in this new sqlite3 queue before any attempt + is made to send them. An immediate attempt is then made to send any + message not affected by "Delay sending until docked". + 3. Sending of queued messages will be attempted every 5 minutes, unless + "Delay sending until docked" is active and the Cmdr is not docked in + their own ship. This is in case a message failed to send due to an issue + communicating with the EDDN Gateway. + 4. When you dock in your own ship an immediate attempt to send all queued + messages will be initiated. + 5. When processing queued messages the same 0.4-second inter-message delay + as with the old code has been implemented. This serves to not suddenly + flood the EDDN Gateway. If any message fails to send for Gateway reasons, + i.e. not a bad message, then this processing is abandoned to wait for + the next invocation. + + The 5-minute timer in point 2 differs from the old code, where almost any + new message sending attempt could initiate processing of the queue. At + application startup this delay is only 10 seconds. + + Currently, the feedback of "Sending data to EDDN..." in the UI status line + has been removed. +* As a result of this EDDN rework this application now sends appropriate + `gameversion` and `gamebuild` strings in EDDN message headers. + The rework was necessary in order to enable this, in case of any queued + or delayed messages which did not contain this information in the legacy + `replay.jsonl` format. 
+ + **EDDN Listeners** - *If any message was already delayed such that it did not + have the EDDN header recorded, then the `gameversion` and `gamebuild` will + be empty strings*. In order to indicate this the `softwareName` will have + ` (legacy replay)` appended to it, e.g. `E:D Market Connector Connector + [Windows] (legacy replay)`. In general this indicates that the message was + queued up using a version of EDMC prior to this one. If you're only + interested in Live galaxy data then you might want to ignore such messages. + +Update 14 and the Galaxy Split +--- +Due to the galaxy split [announced by Frontier](https://www.elitedangerous.com/news/elite-dangerous-update-14-and-beyond-live-and-legacy-modes) +there are some changes to the major third-party websites and tools. + +* Inara [has chosen](https://inara.cz/elite/board-thread/7049/463292/#463292) + to only accept Live galaxy data on its API. As such EDMarketConnector will + not even process Journal data for Inara after 2022-11-29T09:00:00+00:00 + unless the `gameversion` indicates a Live client. This explicitly checks + that the game's version is semantically equal to or greater than '4.0.0'. + + If a Live client is *not* detected, then there is an INFO level logging + message "Inara only accepts Live galaxy data", which is also set as the main + UI status line. This message will repeat, at most, every 5 minutes. + + If you continue to play in the Legacy galaxy only then you probably want to + just disable the Inara plugin with the checkbox on Settings > Inara. + +* As this application now sends `gameversion` in all EDDN message headers you + can expect other third-party sites to choose to filter data based on that. + + Look for announcements by individual sites/tools as to what they have chosen + to do. + +Developers +--- +* There is a new flag in `state` passed to plugins, `IsDocked`. See PLUGINS.md + for details. --- diff --git a/config/__init__.py b/config/__init__.py index 70d0603e..982912c1 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -52,7 +52,7 @@ appcmdname = 'EDMC' # # Major.Minor.Patch(-prerelease)(+buildmetadata) # NB: Do *not* import this, use the functions appversion() and appversion_nobuild() -_static_appversion = '5.6.0-alpha0' +_static_appversion = '5.6.0-beta1' _cached_version: Optional[semantic_version.Version] = None copyright = '© 2015-2019 Jonathan Harris, 2020-2022 EDCD' From 81111d72bf362f105217a653f82080e0482a83b4 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 16:37:37 +0000 Subject: [PATCH 86/95] Changelog: Correct 'Developers' header, and clarify EDDN delays * I'd missed 'Plugin ' off 'Plugin Developers'. * Clarification added that EDDN messages will still go through quickly if not delayed by user settings or an EDDN Gateway issue. --- ChangeLog.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ChangeLog.md b/ChangeLog.md index 1e31f7e8..dda30d95 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -58,6 +58,11 @@ Pre-Release 5.6.0-beta1 Currently, the feedback of "Sending data to EDDN..." in the UI status line has been removed. + + **If you do not have "Delay sending until docked" active, then the only + messages that will be at all delayed will be where there was a communication + problem with the EDDN Gateway, or it otherwise indicated a problem other + than 'your message is bad'.** * As a result of this EDDN rework this application now sends appropriate `gameversion` and `gamebuild` strings in EDDN message headers. 
The rework was necessary in order to enable this, in case of any queued @@ -96,7 +101,7 @@ there are some changes to the major third-party websites and tools. Look for announcements by individual sites/tools as to what they have chosen to do. -Developers +Plugin Developers --- * There is a new flag in `state` passed to plugins, `IsDocked`. See PLUGINS.md for details. From a944eaf445c58858bfdacc8f262e8b2448fbc46f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 16:49:45 +0000 Subject: [PATCH 87/95] translations: Add new inara.py string about Live data only --- L10n/en.template | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/L10n/en.template b/L10n/en.template index f6ebd8e1..8bbfad96 100644 --- a/L10n/en.template +++ b/L10n/en.template @@ -46,6 +46,9 @@ /* inara.py: Text for INARA API keys link ( goes to https://inara.cz/settings-api ); In files: inara.py:225; load.py:225; inara.py:234; */ "Inara credentials" = "Inara credentials"; +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inara only accepts Live galaxy data"; + /* inara.py: INARA API returned some kind of error (error message will be contained in {MSG}); In files: inara.py:1316; inara.py:1328; load.py:1319; load.py:1331; inara.py:1587; inara.py:1600; */ "Error: Inara {MSG}" = "Error: Inara {MSG}"; @@ -744,4 +747,3 @@ /* stats.py: Status dialog title; In files: stats.py:422; */ "Ships" = "Ships"; - From 2ac8026e3ea85fee7246538046884c0710cc751f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Sun, 27 Nov 2022 17:01:28 +0000 Subject: [PATCH 88/95] eddn: Allow for `''` in `standard_header()` `''` is Falsey, but a valid value, so we need an explicit `is not None` test for these. --- plugins/eddn.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 5921e19c..7e8bde96 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -989,12 +989,25 @@ class EDDN: :return: The standard header """ + # We want to pass `''` sometimes, so can't just use a Truthiness test + if game_version is not None: + gv = game_version + + else: + gv = this.game_version + + if game_build is not None: + gb = game_build + + else: + gb = this.game_build + return { 'softwareName': f'{applongname} [{system() if sys.platform != "darwin" else "Mac OS"}]', 'softwareVersion': str(appversion_nobuild()), 'uploaderID': this.cmdr_name, - 'gameversion': game_version or this.game_version, - 'gamebuild': game_build or this.game_build, + 'gameversion': gv, + 'gamebuild': gb, } def export_journal_generic(self, cmdr: str, is_beta: bool, entry: Mapping[str, Any]) -> None: From ad9504160539750de0944df0cb1e9cc395b09a86 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 10:51:25 +0000 Subject: [PATCH 89/95] edsm: Send gameversion/build in all messages * Record the 'state' version of these in `this`. * Use those when constructing the message. * NB: Need to check if messages can be retained in the queue across client changes. Coming up .... --- plugins/edsm.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/plugins/edsm.py b/plugins/edsm.py index 8bd5fd30..2c826f58 100644 --- a/plugins/edsm.py +++ b/plugins/edsm.py @@ -71,6 +71,9 @@ class This: def __init__(self): self.shutting_down = False # Plugin is shutting down. 
+ self.game_version = "" + self.game_build = "" + self.session: requests.Session = requests.Session() self.session.headers['User-Agent'] = user_agent self.queue: Queue = Queue() # Items to be sent to EDSM by worker thread @@ -432,6 +435,9 @@ def journal_entry( # noqa: C901, CCR001 if should_return: return + this.game_version = state['GameVersion'] + this.game_build = state['GameBuild'] + entry = new_entry this.on_foot = state['OnFoot'] @@ -726,6 +732,8 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently 'apiKey': apikey, 'fromSoftware': applongname, 'fromSoftwareVersion': str(appversion()), + 'fromGameVersion': this.game_version, + 'fromGameBuild': this.game_build, 'message': json.dumps(pending, ensure_ascii=False).encode('utf-8'), } From 5743fd38034a224a5f35f5687264460c62adc65f Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 11:04:51 +0000 Subject: [PATCH 90/95] edsm: Push gameversion/build into the queue to ensure correctness 1. Due to the _TIMEOUT on the actual `post()` of a message it would be possible for new entries to get queued in the meantime. These queued entries could be 'in session' and end up going through pending and thus sent before one of the 'new session' events is detected so as to clear pending. The `this.gameversion/build` could have changed in the meantime, so are no longer correct if game client changed. 2. So, pass in the current gameversion/build when a message is pushed into the queue, and parse those back out when they're pulled out of the queue. 3. Use those versions in the message, not `this.` versions. --- plugins/edsm.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/plugins/edsm.py b/plugins/edsm.py index 2c826f58..bf82b815 100644 --- a/plugins/edsm.py +++ b/plugins/edsm.py @@ -552,7 +552,7 @@ entry: {entry!r}''' materials.update(transient) logger.trace_if(CMDR_EVENTS, f'"LoadGame" event, queueing Materials: {cmdr=}') - this.queue.put((cmdr, materials)) + this.queue.put((cmdr, this.game_version, this.game_build, materials)) if entry['event'] in ('CarrierJump', 'FSDJump', 'Location', 'Docked'): logger.trace_if( @@ -561,7 +561,7 @@ Queueing: {entry!r}''' ) logger.trace_if(CMDR_EVENTS, f'"{entry["event"]=}" event, queueing: {cmdr=}') - this.queue.put((cmdr, entry)) + this.queue.put((cmdr, this.game_version, this.game_build, entry)) # Update system data @@ -663,10 +663,10 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently logger.debug(f'{this.shutting_down=}, so setting closing = True') closing = True - item: Optional[Tuple[str, Mapping[str, Any]]] = this.queue.get() + item: Optional[Tuple[str, str, str, Mapping[str, Any]]] = this.queue.get() if item: - (cmdr, entry) = item - logger.trace_if(CMDR_EVENTS, f'De-queued ({cmdr=}, {entry["event"]=})') + (cmdr, game_version, game_build, entry) = item + logger.trace_if(CMDR_EVENTS, f'De-queued ({cmdr=}, {game_version=}, {game_build=}, {entry["event"]=})') else: logger.debug('Empty queue message, setting closing = True') @@ -732,8 +732,8 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently 'apiKey': apikey, 'fromSoftware': applongname, 'fromSoftwareVersion': str(appversion()), - 'fromGameVersion': this.game_version, - 'fromGameBuild': this.game_build, + 'fromGameVersion': game_version, + 'fromGameBuild': game_build, 'message': json.dumps(pending, ensure_ascii=False).encode('utf-8'), } @@ -815,7 +815,7 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently plug.show_error(_("Error: 
Can't connect to EDSM")) if entry['event'].lower() in ('shutdown', 'commander', 'fileheader'): - # Game shutdown or new login so we MUST not hang on to pending + # Game shutdown or new login, so we MUST not hang on to pending pending = [] logger.trace_if(CMDR_EVENTS, f'Blanked pending because of event: {entry["event"]}') From 1ec1253b486937cd0cf2f430c687f1b010ecde53 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 11:08:48 +0000 Subject: [PATCH 91/95] appversion: Change to 5.6.0-alpha2 to be distinct * This is alpha, not beta. * We have an -alpha0 and a -beta1 already, so use -alpha2 so even that digit is distinct. --- config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/__init__.py b/config/__init__.py index 70d0603e..bd8c1418 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -52,7 +52,7 @@ appcmdname = 'EDMC' # # Major.Minor.Patch(-prerelease)(+buildmetadata) # NB: Do *not* import this, use the functions appversion() and appversion_nobuild() -_static_appversion = '5.6.0-alpha0' +_static_appversion = '5.6.0-alpha2' _cached_version: Optional[semantic_version.Version] = None copyright = '© 2015-2019 Jonathan Harris, 2020-2022 EDCD' From a581d889fece98b053a7ec076db6cade490fc9bb Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 12:18:10 +0000 Subject: [PATCH 92/95] edsm: Add a paranoia check for changed gameversion * In theory we would always see `Fileheader` and clear `pending[]`, but let's be extra paranoid and also clear it if there's a gameversion/build difference between the prior event and the current one. --- plugins/edsm.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/plugins/edsm.py b/plugins/edsm.py index bf82b815..8ddb7661 100644 --- a/plugins/edsm.py +++ b/plugins/edsm.py @@ -644,6 +644,8 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently pending: List[Mapping[str, Any]] = [] # Unsent events closing = False cmdr: str = "" + last_game_version = "" + last_game_build = "" entry: Mapping[str, Any] = {} while not this.discarded_events: @@ -692,6 +694,20 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently logger.trace_if( CMDR_EVENTS, f'({cmdr=}, {entry["event"]=}): not in discarded_events, appending to pending') + # Discard the pending list if it's a new Journal file OR + # if the gameversion has changed. We claim a single + # gameversion for an entire batch of events so can't mix + # them. + # The specific gameversion check caters for scenarios where + # we took some time in the last POST, had new events queued + # in the meantime *and* the game client crashed *and* was + # changed to a different gameversion. 
+ if ( + entry['event'].lower() == 'fileheader' + or last_game_version != game_version or last_game_build != game_build + ): + pending = [] + pending.append(entry) # drop events if required by killswitch @@ -823,6 +839,9 @@ def worker() -> None: # noqa: CCR001 C901 # Cant be broken up currently logger.debug('closing, so returning.') return + last_game_version = game_version + last_game_build = game_build + logger.debug('Done.') From 3f0aefd671746c68ed6c1d2c22d78bcceaa30b42 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 15:38:34 +0000 Subject: [PATCH 93/95] eddn: Fix spelling mistake in a log message --- plugins/eddn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/eddn.py b/plugins/eddn.py index 7e8bde96..5af9b6e6 100644 --- a/plugins/eddn.py +++ b/plugins/eddn.py @@ -255,7 +255,7 @@ class EDDNSender: finally: # Best effort at removing the file/contents # NB: The legacy code assumed it could write to the file. - logger.info("Converson` to `eddn_queue-v1.db` complete, removing `replay.jsonl`") + logger.info("Conversion` to `eddn_queue-v1.db` complete, removing `replay.jsonl`") replay_file = open(filename, 'w') # Will truncate replay_file.close() os.unlink(filename) From 1e5d09eff44cbf55cba5e189b640f89df9862b51 Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 15:39:03 +0000 Subject: [PATCH 94/95] Release 5.6.0: appversion and changelog --- ChangeLog.md | 81 +++++++++++++++++++++++++++++++++++++--------- config/__init__.py | 2 +- 2 files changed, 66 insertions(+), 17 deletions(-) diff --git a/ChangeLog.md b/ChangeLog.md index dda30d95..283a09be 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -27,8 +27,17 @@ produce the Windows executables and installer. --- -Pre-Release 5.6.0-beta1 +Release 5.6.0 === + +Tha major reason for this release is to address the Live versus Legacy galaxy +split [coming in Update 14 of the game](https://www.elitedangerous.com/news/elite-dangerous-update-14-and-beyond-live-and-legacy-modes). +See the section "Update 14 and the Galaxy Split" below for how this might +impact you. + +Changes +--- + * We now test against, and package with, Python 3.10.8. * The code for sending data to EDDN has been reworked. This changes the 'replay log' from utilising an internal array, backed by a flat file @@ -52,7 +61,7 @@ Pre-Release 5.6.0-beta1 i.e. not a bad message, then this processing is abandoned to wait for the next invocation. - The 5-minute timer in point 2 differs from the old code, where almost any + The 5-minute timer in point 3 differs from the old code, where almost any new message sending attempt could initiate processing of the queue. At application startup this delay is only 10 seconds. @@ -68,14 +77,14 @@ Pre-Release 5.6.0-beta1 The rework was necessary in order to enable this, in case of any queued or delayed messages which did not contain this information in the legacy `replay.jsonl` format. - - **EDDN Listeners** - *If any message was already delayed such that it did not - have the EDDN header recorded, then the `gameversion` and `gamebuild` will - be empty strings*. In order to indicate this the `softwareName` will have - ` (legacy replay)` appended to it, e.g. `E:D Market Connector Connector - [Windows] (legacy replay)`. In general this indicates that the message was - queued up using a version of EDMC prior to this one. If you're only - interested in Live galaxy data then you might want to ignore such messages. 
+* For EDSM there is a very unlikely set of circumstances that could, in theory + lead to some events not being sent. This is so as to safeguard against + sending a batch with a gameversion/build claimed that does not match for + *all* of the events in that batch. + + It would take a combination of "communications with EDSM are slow", more + events (the ones that would be lost), a game client crash, *and* starting + a new game client before the 'more events' are sent. Update 14 and the Galaxy Split --- @@ -83,10 +92,12 @@ Due to the galaxy split [announced by Frontier](https://www.elitedangerous.com/n there are some changes to the major third-party websites and tools. * Inara [has chosen](https://inara.cz/elite/board-thread/7049/463292/#463292) - to only accept Live galaxy data on its API. As such EDMarketConnector will - not even process Journal data for Inara after 2022-11-29T09:00:00+00:00 - unless the `gameversion` indicates a Live client. This explicitly checks - that the game's version is semantically equal to or greater than '4.0.0'. + to only accept Live galaxy data on its API. + + This application will not even process Journal data for Inara after + 2022-11-29T09:00:00+00:00 *unless the `gameversion` indicates a Live client*. + This explicitly checks that the game's version is semantically equal to or + greater than '4.0.0'. If a Live client is *not* detected, then there is an INFO level logging message "Inara only accepts Live galaxy data", which is also set as the main @@ -94,13 +105,51 @@ there are some changes to the major third-party websites and tools. If you continue to play in the Legacy galaxy only then you probably want to just disable the Inara plugin with the checkbox on Settings > Inara. +* All batches of events sent to EDSM will be tagged with a `gameversion`, in + a similar manner to the EDDN header. -* As this application now sends `gameversion` in all EDDN message headers you - can expect other third-party sites to choose to filter data based on that. + Ref: [EDSM api-journal-v1](https://www.edsm.net/en/api-journal-v1) +* All EDDN messages will now have appropriate `gameversion` and `gamebuild` + fields in the `header` as per + [EDDN/docs/Developers.md](https://github.com/EDCD/EDDN/blob/live/docs/Developers.md#gameversions-and-gamebuild). + + As a result of this you can expect third-party sites to choose to filter data + based on that. Look for announcements by individual sites/tools as to what they have chosen to do. +Known Bugs +--- +In testing if it had been broken at all due to 5.5.0 -> 5.6.0 changes it has +come to light that `EDMC.EXE -n`, to send data to EDDN, was already broken in +5.5.0. + +In addition, there is now some extra 'INFO' logging output which will be +produced by any invocation of `EDMC.EXE`. This might break third-party use of +it, e.g. [Trade Computer Extension Mk.II](https://forums.frontier.co.uk/threads/trade-computer-extension-mk-ii.223056/). +This will be fixed as soon as the dust settles from Update 14, with emphasis +being on ensuring the GUI `EDMarketConnector.exe` functions properly. + +Notes for EDDN Listeners +--- +* Where EDMC sourced data from the Journal files it will set `gameversion` + and `gamebuild` as per their values in `Fileheader` or `LoadGame`, whichever + was more recent (there are some events that occur between these). +* *If any message was already delayed such that it did not + have the EDDN header recorded, then the `gameversion` and `gamebuild` will + be empty strings*. 
In order to indicate this the `softwareName` will have + ` (legacy replay)` appended to it, e.g. `E:D Market Connector Connector + [Windows] (legacy replay)`. In general this indicates that the message was + queued up using a version of EDMC prior to this one. If you're only + interested in Live galaxy data then you might want to ignore such messages. +* Where EDMC sourced data from a CAPI endpoint, the resulting EDDN message + will have a `gameversion` of `CAPI-` set, e.g. `CAPI-market`. + **At this time it is not 100% certain which galaxy this data will be for, so + all listeners are advised to ignore/queue such data until this is clarified**. + + `gamebuild` will be an empty string for all CAPI-sourced data. + Plugin Developers --- * There is a new flag in `state` passed to plugins, `IsDocked`. See PLUGINS.md diff --git a/config/__init__.py b/config/__init__.py index e3fa1312..100ec7e7 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -52,7 +52,7 @@ appcmdname = 'EDMC' # # Major.Minor.Patch(-prerelease)(+buildmetadata) # NB: Do *not* import this, use the functions appversion() and appversion_nobuild() -_static_appversion = '5.6.0-beta2' +_static_appversion = '5.6.0' _cached_version: Optional[semantic_version.Version] = None copyright = '© 2015-2019 Jonathan Harris, 2020-2022 EDCD' From a9ccb7d56597c9f33d633e623a31eaff1a1e264e Mon Sep 17 00:00:00 2001 From: Athanasius Date: Mon, 28 Nov 2022 15:45:06 +0000 Subject: [PATCH 95/95] translations: Updated from onesky * The inara.py "Inara only accepts Live galaxy data" added. --- L10n/ja.strings | 3 +++ L10n/pt-BR.strings | 3 +++ L10n/pt-PT.strings | 3 +++ L10n/ru.strings | 3 +++ L10n/sr-Latn-BA.strings | 3 +++ L10n/sr-Latn.strings | 3 +++ 6 files changed, 18 insertions(+) diff --git a/L10n/ja.strings b/L10n/ja.strings index 8656cfbd..531e9c6d 100644 --- a/L10n/ja.strings +++ b/L10n/ja.strings @@ -1,3 +1,6 @@ +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inaraは現行の銀河データのみ受け付けます"; + /* Language name */ "!Language" = "日本語"; diff --git a/L10n/pt-BR.strings b/L10n/pt-BR.strings index 07c0e669..93e34f0c 100644 --- a/L10n/pt-BR.strings +++ b/L10n/pt-BR.strings @@ -1,3 +1,6 @@ +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inara apenas aceita dados da versão Live."; + /* Language name */ "!Language" = "Português (Brasil)"; diff --git a/L10n/pt-PT.strings b/L10n/pt-PT.strings index 0ee7810f..20821465 100644 --- a/L10n/pt-PT.strings +++ b/L10n/pt-PT.strings @@ -1,3 +1,6 @@ +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "A API Inara só aceita dados da versão Live"; + /* Language name */ "!Language" = "Português (Portugal)"; diff --git a/L10n/ru.strings b/L10n/ru.strings index 979f0367..582fdd21 100644 --- a/L10n/ru.strings +++ b/L10n/ru.strings @@ -1,3 +1,6 @@ +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inara принимает только данные Live-версии"; + /* Language name */ "!Language" = "Русский"; diff --git a/L10n/sr-Latn-BA.strings b/L10n/sr-Latn-BA.strings index 9f94d6c3..bdc47cd8 100644 --- a/L10n/sr-Latn-BA.strings +++ b/L10n/sr-Latn-BA.strings @@ -1,3 +1,6 @@ +/* inara.py: 
The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inara prihvata samo Live galaxy podatke"; + /* Language name */ "!Language" = "Srpski (Latinica, Bosna i Hercegovina)"; diff --git a/L10n/sr-Latn.strings b/L10n/sr-Latn.strings index 189f5192..ed3cee47 100644 --- a/L10n/sr-Latn.strings +++ b/L10n/sr-Latn.strings @@ -1,3 +1,6 @@ +/* inara.py: The Inara API only accepts Live galaxy data, not Legacy galaxy data; In files: inara.py:383; inara.py:386; */ +"Inara only accepts Live galaxy data" = "Inara prihvata samo \"žive\" podatke o galaksiji"; + /* Language name */ "!Language" = "Srpski (Latinica)";