mirror of https://github.com/EDCD/EDMarketConnector.git
synced 2025-04-15 08:40:34 +03:00

Merge pull request #2260 from HullSeals/enhancement/2114/pathlib-handover

[2114] Pathlib Handover

This commit is contained in:
commit 2adb440762
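
The handover replaces os.path string handling with pathlib.Path objects across the files below. As a quick orientation to the recurring substitutions in the hunks, the following is a minimal sketch of the typical before/after pairs (illustrative only, with a hypothetical file name; not code taken from the diff itself):

    from pathlib import Path

    # os.path.join(a, b)             ->  Path(a) / b
    # os.path.dirname(p)             ->  Path(p).parent
    # os.path.basename(p)            ->  Path(p).name
    # os.path.abspath(p)             ->  Path(p).resolve()
    # os.path.isfile(p) / isdir(p)   ->  Path(p).is_file() / Path(p).is_dir()
    # os.path.getmtime(p)            ->  Path(p).stat().st_mtime

    json_file = Path('some_dump.json').resolve()   # hypothetical file name
    if json_file.is_file():
        querytime = int(json_file.stat().st_mtime)

Note that Path objects still need an explicit str() when handed to APIs that expect plain strings, as several hunks below do for environment variables and zipfile names.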
EDMC.py | 11

@@ -14,6 +14,7 @@ import locale
 import os
 import queue
 import sys
+from pathlib import Path
 from time import sleep, time
 from typing import TYPE_CHECKING, Any
 
@@ -212,22 +213,24 @@ def main():  # noqa: C901, CCR001
 # system, chances are its the current locale, and not utf-8. Otherwise if it was copied, its probably
 # utf8. Either way, try the system FIRST because reading something like cp1251 in UTF-8 results in garbage
 # but the reverse results in an exception.
-json_file = os.path.abspath(args.j)
+json_file = Path(args.j).resolve()
 try:
 with open(json_file) as file_handle:
 data = json.load(file_handle)
 except UnicodeDecodeError:
 with open(json_file, encoding='utf-8') as file_handle:
 data = json.load(file_handle)
-config.set('querytime', int(os.path.getmtime(args.j)))
+file_path = Path(args.j)
+modification_time = file_path.stat().st_mtime
+config.set('querytime', int(modification_time))
 
 else:
 # Get state from latest Journal file
 logger.debug('Getting state from latest journal file')
 try:
-monitor.currentdir = config.get_str('journaldir', default=config.default_journal_dir)
+monitor.currentdir = Path(config.get_str('journaldir', default=config.default_journal_dir))
 if not monitor.currentdir:
-monitor.currentdir = config.default_journal_dir
+monitor.currentdir = config.default_journal_dir_path
 
 logger.debug(f'logdir = "{monitor.currentdir}"')
 logfile = monitor.journal_newest_filename(monitor.currentdir)
@@ -26,12 +26,13 @@ To utilise logging in core code, or internal plugins, include this:
 
 To utilise logging in a 'found' (third-party) plugin, include this:
 
-import os
+from pathlib import Path
 import logging
 
-plugin_name = os.path.basename(os.path.dirname(__file__))
+# Retrieve the name of the plugin folder
+plugin_name = Path(__file__).resolve().parent.name
+# Set up logger with hierarchical name including appname and plugin_name
 # plugin_name here *must* be the name of the folder the plugin resides in
-# See, plug.py:load_plugins()
 logger = logging.getLogger(f'{appname}.{plugin_name}')
 """
 from __future__ import annotations
@@ -485,8 +486,8 @@ class EDMCContextFilter(logging.Filter):
 :return: The munged module_name.
 """
 file_name = pathlib.Path(frame_info.filename).expanduser()
-plugin_dir = pathlib.Path(config.plugin_dir_path).expanduser()
+plugin_dir = config.plugin_dir_path.expanduser()
-internal_plugin_dir = pathlib.Path(config.internal_plugin_dir_path).expanduser()
+internal_plugin_dir = config.internal_plugin_dir_path.expanduser()
 # Find the first parent called 'plugins'
 plugin_top = file_name
 while plugin_top and plugin_top.name != '':
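
Put together as a complete plugin-side snippet, the setup the revised docstring above prescribes would look roughly like the sketch below (assuming, as the plugintest hunk later in this diff also does, that appname is importable from the application's config module):

    import logging
    from pathlib import Path

    from config import appname  # EDMC makes its application name importable from config

    # plugin_name here *must* be the name of the folder the plugin resides in
    plugin_name = Path(__file__).resolve().parent.name

    # Hierarchical logger name: <appname>.<plugin_name>, as EDMCLogging expects
    logger = logging.getLogger(f'{appname}.{plugin_name}')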
@@ -11,7 +11,7 @@ import locale
 import webbrowser
 import platform
 import sys
-from os import chdir, environ, path
+from os import chdir, environ
 import pathlib
 import logging
 from journal_lock import JournalLock
@@ -19,10 +19,10 @@ from journal_lock import JournalLock
 if getattr(sys, "frozen", False):
 # Under py2exe sys.path[0] is the executable name
 if sys.platform == "win32":
-chdir(path.dirname(sys.path[0]))
+chdir(pathlib.Path(sys.path[0]).parent)
 # Allow executable to be invoked from any cwd
-environ["TCL_LIBRARY"] = path.join(path.dirname(sys.path[0]), "lib", "tcl")
+environ['TCL_LIBRARY'] = str(pathlib.Path(sys.path[0]).parent / 'lib' / 'tcl')
-environ["TK_LIBRARY"] = path.join(path.dirname(sys.path[0]), "lib", "tk")
+environ['TK_LIBRARY'] = str(pathlib.Path(sys.path[0]).parent / 'lib' / 'tk')
 
 else:
 # We still want to *try* to have CWD be where the main script is, even if
@@ -44,11 +44,12 @@ def get_sys_report(config: config.AbstractConfig) -> str:
 plt = platform.uname()
 locale.setlocale(locale.LC_ALL, "")
 lcl = locale.getlocale()
-monitor.currentdir = config.get_str(
+monitor.currentdir = pathlib.Path(config.get_str(
 "journaldir", default=config.default_journal_dir
+)
 )
 if not monitor.currentdir:
-monitor.currentdir = config.default_journal_dir
+monitor.currentdir = config.default_journal_dir_path
 try:
 logfile = monitor.journal_newest_filename(monitor.currentdir)
 if logfile is None:
@@ -115,12 +116,12 @@ def main() -> None:
 root.withdraw()  # Hide the window initially to calculate the dimensions
 try:
 icon_image = tk.PhotoImage(
-file=path.join(cur_config.respath_path, "io.edcd.EDMarketConnector.png")
+file=cur_config.respath_path / "io.edcd.EDMarketConnector.png"
 )
 
 root.iconphoto(True, icon_image)
 except tk.TclError:
-root.iconbitmap(path.join(cur_config.respath_path, "EDMarketConnector.ico"))
+root.iconbitmap(cur_config.respath_path / "EDMarketConnector.ico")
 
 sys_report = get_sys_report(cur_config)
 
@@ -20,7 +20,7 @@ import subprocess
 import sys
 import threading
 import webbrowser
-from os import chdir, environ, path
+from os import chdir, environ
 from time import localtime, strftime, time
 from typing import TYPE_CHECKING, Any, Literal
 from constants import applongname, appname, protocolhandler_redirect
@@ -31,10 +31,10 @@ from constants import applongname, appname, protocolhandler_redirect
 if getattr(sys, 'frozen', False):
 # Under py2exe sys.path[0] is the executable name
 if sys.platform == 'win32':
-chdir(path.dirname(sys.path[0]))
+os.chdir(pathlib.Path(sys.path[0]).parent)
 # Allow executable to be invoked from any cwd
-environ['TCL_LIBRARY'] = path.join(path.dirname(sys.path[0]), 'lib', 'tcl')
+environ['TCL_LIBRARY'] = str(pathlib.Path(sys.path[0]).parent / 'lib' / 'tcl')
-environ['TK_LIBRARY'] = path.join(path.dirname(sys.path[0]), 'lib', 'tk')
+environ['TK_LIBRARY'] = str(pathlib.Path(sys.path[0]).parent / 'lib' / 'tk')
 
 else:
 # We still want to *try* to have CWD be where the main script is, even if
@@ -470,8 +470,8 @@ class AppWindow:
 self.w.wm_iconbitmap(default='EDMarketConnector.ico')
 
 else:
-self.w.tk.call('wm', 'iconphoto', self.w, '-default',
+image_path = config.respath_path / 'io.edcd.EDMarketConnector.png'
-tk.PhotoImage(file=path.join(config.respath_path, 'io.edcd.EDMarketConnector.png')))
+self.w.tk.call('wm', 'iconphoto', self.w, '-default', image=tk.PhotoImage(file=image_path))
 
 # TODO: Export to files and merge from them in future ?
 self.theme_icon = tk.PhotoImage(
@@ -1652,7 +1652,7 @@ class AppWindow:
 # Avoid file length limits if possible
 provider = config.get_str('shipyard_provider', default='EDSY')
 target = plug.invoke(provider, 'EDSY', 'shipyard_url', loadout, monitor.is_beta)
-file_name = path.join(config.app_dir_path, "last_shipyard.html")
+file_name = config.app_dir_path / "last_shipyard.html"
 
 with open(file_name, 'w') as f:
 f.write(SHIPYARD_HTML_TEMPLATE.format(
build.py | 19

@@ -10,7 +10,6 @@ import shutil
 import sys
 import pathlib
 from string import Template
-from os.path import join, isdir
 import py2exe
 from config import (
 appcmdname,
@@ -37,7 +36,7 @@ def iss_build(template_path: str, output_file: str) -> None:
 new_file.write(newfile)
 
 
-def system_check(dist_dir: str) -> str:
+def system_check(dist_dir: pathlib.Path) -> str:
 """Check if the system is able to build."""
 if sys.version_info < (3, 11):
 sys.exit(f"Unexpected Python version {sys.version}")
@@ -55,17 +54,17 @@ def system_check(dist_dir: str) -> str:
 
 print(f"Git short hash: {git_shorthash}")
 
-if dist_dir and len(dist_dir) > 1 and isdir(dist_dir):
+if dist_dir and pathlib.Path.is_dir(dist_dir):
 shutil.rmtree(dist_dir)
 return gitversion_file
 
 
 def generate_data_files(
 app_name: str, gitversion_file: str, plugins: list[str]
-) -> list[tuple[str, list[str]]]:
+) -> list[tuple[object, object]]:
 """Create the required datafiles to build."""
 l10n_dir = "L10n"
-fdevids_dir = "FDevIDs"
+fdevids_dir = pathlib.Path("FDevIDs")
 data_files = [
 (
 "",
@@ -88,13 +87,13 @@ def generate_data_files(
 ),
 (
 l10n_dir,
-[join(l10n_dir, x) for x in os.listdir(l10n_dir) if x.endswith(".strings")],
+[pathlib.Path(l10n_dir) / x for x in os.listdir(l10n_dir) if x.endswith(".strings")]
 ),
 (
 fdevids_dir,
 [
-join(fdevids_dir, "commodity.csv"),
+pathlib.Path(fdevids_dir / "commodity.csv"),
-join(fdevids_dir, "rare_commodity.csv"),
+pathlib.Path(fdevids_dir / "rare_commodity.csv"),
 ],
 ),
 ("plugins", plugins),
@@ -104,7 +103,7 @@
 
 def build() -> None:
 """Build EDMarketConnector using Py2Exe."""
-dist_dir: str = "dist.win32"
+dist_dir: pathlib.Path = pathlib.Path("dist.win32")
 gitversion_filename: str = system_check(dist_dir)
 
 # Constants
@@ -142,7 +141,7 @@ def build() -> None:
 }
 
 # Function to generate DATA_FILES list
-data_files: list[tuple[str, list[str]]] = generate_data_files(
+data_files: list[tuple[object, object]] = generate_data_files(
 appname, gitversion_filename, plugins
 )
 
collate.py | 17

@@ -17,7 +17,6 @@ import json
 import os
 import pathlib
 import sys
-from os.path import isfile
 from traceback import print_exc
 
 import companion
@@ -35,7 +34,7 @@ def __make_backup(file_name: pathlib.Path, suffix: str = '.bak') -> None:
 """
 backup_name = file_name.parent / (file_name.name + suffix)
 
-if isfile(backup_name):
+if pathlib.Path.is_file(backup_name):
 os.unlink(backup_name)
 
 os.rename(file_name, backup_name)
@@ -52,13 +51,13 @@ def addcommodities(data) -> None:  # noqa: CCR001
 return
 
 try:
-commodityfile = pathlib.Path(config.app_dir_path / 'FDevIDs' / 'commodity.csv')
+commodityfile = config.app_dir_path / 'FDevIDs' / 'commodity.csv'
 except FileNotFoundError:
 commodityfile = pathlib.Path('FDevIDs/commodity.csv')
 commodities = {}
 
 # slurp existing
-if isfile(commodityfile):
+if pathlib.Path.is_file(commodityfile):
 with open(commodityfile) as csvfile:
 reader = csv.DictReader(csvfile)
 for row in reader:
@@ -86,7 +85,7 @@ def addcommodities(data) -> None:  # noqa: CCR001
 if len(commodities) <= size_pre:
 return
 
-if isfile(commodityfile):
+if pathlib.Path.is_file(commodityfile):
 __make_backup(commodityfile)
 
 with open(commodityfile, 'w', newline='\n') as csvfile:
@@ -109,7 +108,7 @@ def addmodules(data):  # noqa: C901, CCR001
 fields = ('id', 'symbol', 'category', 'name', 'mount', 'guidance', 'ship', 'class', 'rating', 'entitlement')
 
 # slurp existing
-if isfile(outfile):
+if pathlib.Path.is_file(outfile):
 with open(outfile) as csvfile:
 reader = csv.DictReader(csvfile, restval='')
 for row in reader:
@@ -147,7 +146,7 @@ def addmodules(data):  # noqa: C901, CCR001
 if not len(modules) > size_pre:
 return
 
-if isfile(outfile):
+if pathlib.Path.is_file(outfile):
 __make_backup(outfile)
 
 with open(outfile, 'w', newline='\n') as csvfile:
@@ -170,7 +169,7 @@ def addships(data) -> None:  # noqa: CCR001
 fields = ('id', 'symbol', 'name')
 
 # slurp existing
-if isfile(shipfile):
+if pathlib.Path.is_file(shipfile):
 with open(shipfile) as csvfile:
 reader = csv.DictReader(csvfile, restval='')
 for row in reader:
@@ -200,7 +199,7 @@ def addships(data) -> None:  # noqa: CCR001
 if not len(ships) > size_pre:
 return
 
-if isfile(shipfile):
+if pathlib.Path.is_file(shipfile):
 __make_backup(shipfile)
 
 with open(shipfile, 'w', newline='\n') as csvfile:
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 
 import time
-from os.path import join
+from pathlib import Path
 
 from config import config
 from edmc_data import commodity_bracketmap as bracketmap
@@ -29,7 +29,7 @@ def export(data, kind=COMMODITY_DEFAULT, filename=None) -> None:
 filename_time = time.strftime('%Y-%m-%dT%H.%M.%S', time.localtime(querytime))
 filename_kind = 'csv'
 filename = f'{filename_system}.{filename_starport}.{filename_time}.{filename_kind}'
-filename = join(config.get_str('outdir'), filename)
+filename = Path(config.get_str('outdir')) / filename
 
 if kind == COMMODITY_CSV:
 sep = ';'  # BUG: for fixing later after cleanup
@@ -27,6 +27,7 @@ import tkinter as tk
 import urllib.parse
 import webbrowser
 from email.utils import parsedate
+from pathlib import Path
 from queue import Queue
 from typing import TYPE_CHECKING, Any, Mapping, TypeVar
 import requests
@@ -1135,7 +1136,7 @@ class Session:
 
 def dump_capi_data(self, data: CAPIData) -> None:
 """Dump CAPI data to file for examination."""
-if os.path.isdir('dump'):
+if Path('dump').is_dir():
 file_name: str = ""
 if data.source_endpoint == self.FRONTIER_CAPI_PATH_FLEETCARRIER:
 file_name += f"FleetCarrier.{data['name']['callsign']}"
@@ -1203,7 +1204,7 @@ def fixup(data: CAPIData) -> CAPIData:  # noqa: C901, CCR001 # Can't be usefully
 if not commodity_map:
 # Lazily populate
 for f in ('commodity.csv', 'rare_commodity.csv'):
-if not os.path.isfile(config.app_dir_path / 'FDevIDs/' / f):
+if not (config.app_dir_path / 'FDevIDs' / f).is_file():
 logger.warning(f'FDevID file {f} not found! Generating output without these commodity name rewrites.')
 continue
 with open(config.app_dir_path / 'FDevIDs' / f, 'r') as csvfile:
@@ -12,7 +12,7 @@ import sys
 import time
 import tkinter as tk
 from calendar import timegm
-from os.path import getsize, isdir, isfile, join
+from pathlib import Path
 from typing import Any, cast
 from watchdog.observers.api import BaseObserver
 from config import config
@@ -57,7 +57,7 @@ class Dashboard(FileSystemEventHandler):
 
 logdir = config.get_str('journaldir', default=config.default_journal_dir)
 logdir = logdir or config.default_journal_dir
-if not isdir(logdir):
+if not Path.is_dir(Path(logdir)):
 logger.info(f"No logdir, or it isn't a directory: {logdir=}")
 self.stop()
 return False
@@ -164,7 +164,8 @@ class Dashboard(FileSystemEventHandler):
 
 :param event: Watchdog event.
 """
-if event.is_directory or (isfile(event.src_path) and getsize(event.src_path)):
+modpath = Path(event.src_path)
+if event.is_directory or (modpath.is_file() and modpath.stat().st_size):
 # Can get on_modified events when the file is emptied
 self.process(event.src_path if not event.is_directory else None)
 
@@ -177,7 +178,7 @@ class Dashboard(FileSystemEventHandler):
 if config.shutting_down:
 return
 try:
-status_json_path = join(self.currentdir, 'Status.json')
+status_json_path = Path(self.currentdir) / 'Status.json'
 with open(status_json_path, 'rb') as h:
 data = h.read().strip()
 if data:  # Can be empty if polling while the file is being re-written
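
The on_modified hunk above introduces a modpath Path object so the handler can skip callbacks that fire while the file has just been emptied for re-writing. A self-contained sketch of that guard, using the same watchdog API but with a hypothetical handler class and directory, might look like:

    from pathlib import Path

    from watchdog.events import FileSystemEvent, FileSystemEventHandler
    from watchdog.observers import Observer

    class StatusHandler(FileSystemEventHandler):
        def on_modified(self, event: FileSystemEvent) -> None:
            modpath = Path(event.src_path)
            # Skip the callback that fires while the file is zero bytes;
            # a half-rewritten Status.json would otherwise parse as garbage.
            if event.is_directory or (modpath.is_file() and modpath.stat().st_size):
                print(f'{modpath.name} changed, safe to re-read')

    observer = Observer()
    observer.schedule(StatusHandler(), 'some/status/dir')  # hypothetical directory
    observer.start()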
@@ -3,10 +3,10 @@
 """Plugin that tests that modules we bundle for plugins are present and working."""
 
 import logging
-import os
 import shutil
 import sqlite3
 import zipfile
+from pathlib import Path
 
 import semantic_version
 from SubA import SubA
@@ -14,7 +14,7 @@ from SubA import SubA
 from config import appname, appversion, config
 
 # This could also be returned from plugin_start3()
-plugin_name = os.path.basename(os.path.dirname(__file__))
+plugin_name = Path(__file__).resolve().parent.name
 
 # Logger per found plugin, so the folder name is included in
 # the logging format.
@@ -49,17 +49,17 @@ class PluginTest:
 
 def __init__(self, directory: str):
 logger.debug(f'directory = "{directory}')
-dbfile = os.path.join(directory, this.DBFILE)
+dbfile = Path(directory) / this.DBFILE
 
 # Test 'import zipfile'
-with zipfile.ZipFile(dbfile + '.zip', 'w') as zip:
+with zipfile.ZipFile(str(dbfile) + '.zip', 'w') as zip:
-if os.path.exists(dbfile):
+if dbfile.exists():
 zip.write(dbfile)
 zip.close()
 
 # Testing 'import shutil'
-if os.path.exists(dbfile):
+if dbfile.exists():
-shutil.copyfile(dbfile, dbfile + '.bak')
+shutil.copyfile(dbfile, str(dbfile) + '.bak')
 
 # Testing 'import sqlite3'
 self.sqlconn = sqlite3.connect(dbfile)
installer.py | 12

@@ -5,23 +5,23 @@ Copyright (c) EDCD, All Rights Reserved
 Licensed under the GNU General Public License.
 See LICENSE file.
 """
-import os
 import subprocess
+from pathlib import Path
 from build import build
 
 
-def run_inno_setup_installer(iss_path: str) -> None:
+def run_inno_setup_installer(iss_path: Path) -> None:
 """Run the Inno installer, building the installation exe."""
 # Get the path to the Inno Setup compiler (iscc.exe) (Currently set to default path)
-inno_setup_compiler_path: str = "C:\\Program Files (x86)\\Inno Setup 6\\ISCC.exe"
+inno_setup_compiler_path = Path("C:\\Program Files (x86)\\Inno Setup 6\\ISCC.exe")
 
 # Check if the Inno Setup compiler executable exists
-if not os.path.isfile(inno_setup_compiler_path):
+if not inno_setup_compiler_path.exists():
 print(f"Error: Inno Setup compiler not found at '{inno_setup_compiler_path}'.")
 return
 
 # Check if the provided .iss file exists
-if not os.path.isfile(iss_file_path):
+if not iss_file_path.exists():
 print(f"Error: The provided .iss file '{iss_path}' not found.")
 return
 
@@ -40,6 +40,6 @@ def run_inno_setup_installer(iss_path: str) -> None:
 if __name__ == "__main__":
 build()
 # Add the ISS Template File
-iss_file_path: str = "./EDMC_Installer_Config.iss"
+iss_file_path = Path("./EDMC_Installer_Config.iss")
 # Build the ISS file
 run_inno_setup_installer(iss_file_path)
l10n.py | 44

@@ -17,10 +17,9 @@ import re
 import sys
 import warnings
 from contextlib import suppress
-from os import listdir, sep, makedirs
+from os import listdir, sep
-from os.path import basename, dirname, isdir, join, abspath, exists
 from typing import TYPE_CHECKING, Iterable, TextIO, cast
+import pathlib
 from config import config
 from EDMCLogging import get_main_logger
 
@@ -35,7 +34,7 @@ logger = get_main_logger()
 
 # Language name
 LANGUAGE_ID = '!Language'
-LOCALISATION_DIR = 'L10n'
+LOCALISATION_DIR: pathlib.Path = pathlib.Path('L10n')
 
 if sys.platform == 'win32':
 import ctypes
@@ -119,10 +118,10 @@ class Translations:
 
 self.translations = {None: self.contents(cast(str, lang))}
 for plugin in listdir(config.plugin_dir_path):
-plugin_path = join(config.plugin_dir_path, plugin, LOCALISATION_DIR)
+plugin_path = config.plugin_dir_path / plugin / LOCALISATION_DIR
-if isdir(plugin_path):
+if pathlib.Path.is_dir(plugin_path):
 try:
-self.translations[plugin] = self.contents(cast(str, lang), str(plugin_path))
+self.translations[plugin] = self.contents(cast(str, lang), plugin_path)
 
 except UnicodeDecodeError as e:
 logger.warning(f'Malformed file {lang}.strings in plugin {plugin}: {e}')
@@ -133,7 +132,7 @@ class Translations:
 # DEPRECATED: Migrate to translations.translate or tr.tl. Will remove in 6.0 or later.
 builtins.__dict__['_'] = self.translate
 
-def contents(self, lang: str, plugin_path: str | None = None) -> dict[str, str]:
+def contents(self, lang: str, plugin_path: pathlib.Path | None = None) -> dict[str, str]:
 """Load all the translations from a translation file."""
 assert lang in self.available()
 translations = {}
@@ -173,12 +172,12 @@ class Translations:
 :return: The translated string
 """
 plugin_name: str | None = None
-plugin_path: str | None = None
+plugin_path: pathlib.Path | None = None
 
 if context:
 # TODO: There is probably a better way to go about this now.
 plugin_name = context[len(config.plugin_dir)+1:].split(sep)[0]
-plugin_path = join(config.plugin_dir_path, plugin_name, LOCALISATION_DIR)
+plugin_path = config.plugin_dir_path / plugin_name / LOCALISATION_DIR
 
 if lang:
 contents: dict[str, str] = self.contents(lang=lang, plugin_path=plugin_path)
@@ -225,17 +224,17 @@ class Translations:
 
 return names
 
-def respath(self) -> str:
+def respath(self) -> pathlib.Path:
 """Path to localisation files."""
 if getattr(sys, 'frozen', False):
-return abspath(join(dirname(sys.executable), LOCALISATION_DIR))
+return pathlib.Path(sys.executable).parent.joinpath(LOCALISATION_DIR).resolve()
 
 if __file__:
-return abspath(join(dirname(__file__), LOCALISATION_DIR))
+return pathlib.Path(__file__).parent.joinpath(LOCALISATION_DIR).resolve()
 
-return abspath(LOCALISATION_DIR)
+return LOCALISATION_DIR.resolve()
 
-def file(self, lang: str, plugin_path: str | None = None) -> TextIO | None:
+def file(self, lang: str, plugin_path: pathlib.Path | None = None) -> TextIO | None:
 """
 Open the given lang file for reading.
 
@@ -244,8 +243,8 @@ class Translations:
 :return: the opened file (Note: This should be closed when done)
 """
 if plugin_path:
-file_path = join(plugin_path, f'{lang}.strings')
+file_path = plugin_path / f"{lang}.strings"
-if not exists(file_path):
+if not file_path.exists():
 return None
 
 try:
@@ -253,7 +252,7 @@ class Translations:
 except OSError:
 logger.exception(f'could not open {file_path}')
 
-res_path = join(self.respath(), f'{lang}.strings')
+res_path = self.respath() / f'{lang}.strings'
 return open(res_path, encoding='utf-8')
 
 
@@ -382,9 +381,10 @@ Translations: Translations = translations  # type: ignore
 if __name__ == "__main__":
 regexp = re.compile(r'''_\([ur]?(['"])(((?<!\\)\\\1|.)+?)\1\)[^#]*(#.+)?''')  # match a single line python literal
 seen: dict[str, str] = {}
+plugin_dir = pathlib.Path('plugins')
 for f in (
 sorted(x for x in listdir('.') if x.endswith('.py')) +
-sorted(join('plugins', x) for x in (listdir('plugins') if isdir('plugins') else []) if x.endswith('.py'))
+sorted(plugin_dir.glob('*.py')) if plugin_dir.is_dir() else []
 ):
 with open(f, encoding='utf-8') as h:
 lineno = 0
@@ -393,11 +393,11 @@ if __name__ == "__main__":
 match = regexp.search(line)
 if match and not seen.get(match.group(2)):  # only record first commented instance of a string
 seen[match.group(2)] = (
-(match.group(4) and (match.group(4)[1:].strip()) + '. ' or '') + f'[{basename(f)}]'
+(match.group(4) and (match.group(4)[1:].strip()) + '. ' or '') + f'[{pathlib.Path(f).name}]'
 )
 if seen:
-target_path = join(LOCALISATION_DIR, 'en.template.new')
+target_path = LOCALISATION_DIR / 'en.template.new'
-makedirs(dirname(target_path), exist_ok=True)
+target_path.parent.mkdir(parents=True, exist_ok=True)
 with open(target_path, 'w', encoding='utf-8') as target_file:
 target_file.write(f'/* Language name */\n"{LANGUAGE_ID}" = "English";\n\n')
 for thing in sorted(seen, key=str.lower):
@@ -11,7 +11,7 @@ import json
 import re
 import time
 from os import listdir
-from os.path import join
+from pathlib import Path
 import companion
 import util_ships
 from config import config
@@ -45,7 +45,7 @@ def export(data: companion.CAPIData, requested_filename: str | None = None) -> N
 regexp = re.compile(re.escape(ship) + r'\.\d\d\d\d-\d\d-\d\dT\d\d\.\d\d\.\d\d\.txt')
 oldfiles = sorted([x for x in listdir(config.get_str('outdir')) if regexp.match(x)])
 if oldfiles:
-with open(join(config.get_str('outdir'), oldfiles[-1]), 'rU') as h:
+with open(Path(config.get_str('outdir')) / Path(oldfiles[-1]), 'rU') as h:
 if h.read() == string:
 return  # same as last time - don't write
 
@@ -53,9 +53,9 @@ def export(data: companion.CAPIData, requested_filename: str | None = None) -> N
 
 # Write
 
-output_directory = config.get_str('outdir')
+output_directory = Path(config.get_str('outdir'))
 ship_time = time.strftime('%Y-%m-%dT%H.%M.%S', time.localtime(query_time))
-file_path = join(output_directory, f"{ship}.{ship_time}.txt")
+file_path = output_directory / f"{ship}.{ship_time}.txt"
 
 with open(file_path, 'wt') as h:
 h.write(string)
monitor.py | 36

@@ -8,7 +8,7 @@ See LICENSE file.
 from __future__ import annotations
 
 import json
-import pathlib
+from pathlib import Path
 import queue
 import re
 import sys
@@ -16,7 +16,6 @@ import threading
 from calendar import timegm
 from collections import defaultdict
 from os import SEEK_END, SEEK_SET, listdir
-from os.path import basename, expanduser, getctime, isdir, join
 from time import gmtime, localtime, mktime, sleep, strftime, strptime, time
 from typing import TYPE_CHECKING, Any, BinaryIO, MutableMapping
 import semantic_version
@@ -82,7 +81,7 @@ class EDLogs(FileSystemEventHandler):
 # TODO(A_D): A bunch of these should be switched to default values (eg '' for strings) and no longer be Optional
 FileSystemEventHandler.__init__(self)  # futureproofing - not need for current version of watchdog
 self.root: 'tkinter.Tk' = None  # type: ignore # Don't use Optional[] - mypy thinks no methods
-self.currentdir: str | None = None  # The actual logdir that we're monitoring
+self.currentdir: Path | None = None  # The actual logdir that we're monitoring
 self.logfile: str | None = None
 self.observer: BaseObserver | None = None
 self.observed = None  # a watchdog ObservedWatch, or None if polling
@@ -204,9 +203,9 @@ class EDLogs(FileSystemEventHandler):
 if journal_dir == '' or journal_dir is None:
 journal_dir = config.default_journal_dir
 
-logdir = expanduser(journal_dir)
+logdir = Path(journal_dir).expanduser()
 
-if not logdir or not isdir(logdir):
+if not logdir or not Path.is_dir(logdir):
 logger.error(f'Journal Directory is invalid: "{logdir}"')
 self.stop()
 return False
@@ -279,9 +278,10 @@ class EDLogs(FileSystemEventHandler):
 # Odyssey Update 11 has, e.g. Journal.2022-03-15T152503.01.log
 # Horizons Update 11 equivalent: Journal.220315152335.01.log
 # So we can no longer use a naive sort.
-journals_dir_path = pathlib.Path(journals_dir)
+journals_dir_path = Path(journals_dir)
-journal_files = (journals_dir_path / pathlib.Path(x) for x in journal_files)
+journal_files = (journals_dir_path / Path(x) for x in journal_files)
-return str(max(journal_files, key=getctime))
+latest_file = max(journal_files, key=lambda f: Path(f).stat().st_ctime)
+return str(latest_file)
 
 return None
 
@@ -350,7 +350,7 @@ class EDLogs(FileSystemEventHandler):
 
 def on_created(self, event: 'FileSystemEvent') -> None:
 """Watchdog callback when, e.g. client (re)started."""
-if not event.is_directory and self._RE_LOGFILE.search(basename(event.src_path)):
+if not event.is_directory and self._RE_LOGFILE.search(Path(event.src_path).name):
 
 self.logfile = event.src_path
 
@@ -1086,7 +1086,7 @@ class EDLogs(FileSystemEventHandler):
 self.state['Cargo'] = defaultdict(int)
 # From 3.3 full Cargo event (after the first one) is written to a separate file
 if 'Inventory' not in entry:
-with open(join(self.currentdir, 'Cargo.json'), 'rb') as h:  # type: ignore
+with open(self.currentdir / 'Cargo.json', 'rb') as h:  # type: ignore
 entry = json.load(h)
 self.state['CargoJSON'] = entry
 
@@ -1113,7 +1113,7 @@ class EDLogs(FileSystemEventHandler):
 # Always attempt loading of this, but if it fails we'll hope this was
 # a startup/boarding version and thus `entry` contains
 # the data anyway.
-currentdir_path = pathlib.Path(str(self.currentdir))
+currentdir_path = Path(str(self.currentdir))
 shiplocker_filename = currentdir_path / 'ShipLocker.json'
 shiplocker_max_attempts = 5
 shiplocker_fail_sleep = 0.01
@@ -1182,7 +1182,7 @@ class EDLogs(FileSystemEventHandler):
 
 # TODO: v31 doc says this is`backpack.json` ... but Howard Chalkley
 # said it's `Backpack.json`
-backpack_file = pathlib.Path(str(self.currentdir)) / 'Backpack.json'
+backpack_file = Path(str(self.currentdir)) / 'Backpack.json'
 backpack_data = None
 
 if not backpack_file.exists():
@@ -1558,7 +1558,7 @@ class EDLogs(FileSystemEventHandler):
 entry = fcmaterials
 
 elif event_type == 'moduleinfo':
-with open(join(self.currentdir, 'ModulesInfo.json'), 'rb') as mf:  # type: ignore
+with open(self.currentdir / 'ModulesInfo.json', 'rb') as mf:  # type: ignore
 try:
 entry = json.load(mf)
 
@@ -2272,14 +2272,14 @@ class EDLogs(FileSystemEventHandler):
 oldfiles = sorted((x for x in listdir(config.get_str('outdir')) if regexp.match(x)))
 if oldfiles:
 try:
-with open(join(config.get_str('outdir'), oldfiles[-1]), encoding='utf-8') as h:
+with open(config.get_str('outdir') / Path(oldfiles[-1]), encoding='utf-8') as h:
 if h.read() == string:
 return  # same as last time - don't write
 
 except UnicodeError:
 logger.exception("UnicodeError reading old ship loadout with utf-8 encoding, trying without...")
 try:
-with open(join(config.get_str('outdir'), oldfiles[-1])) as h:
+with open(config.get_str('outdir') / Path(oldfiles[-1])) as h:
 if h.read() == string:
 return  # same as last time - don't write
 
@@ -2298,7 +2298,7 @@ class EDLogs(FileSystemEventHandler):
 
 # Write
 ts = strftime('%Y-%m-%dT%H.%M.%S', localtime(time()))
-filename = join(config.get_str('outdir'), f'{ship}.{ts}.txt')
+filename = config.get_str('outdir') / Path(f'{ship}.{ts}.txt')
 
 try:
 with open(filename, 'wt', encoding='utf-8') as h:
@@ -2385,7 +2385,7 @@ class EDLogs(FileSystemEventHandler):
 
 try:
 
-with open(join(self.currentdir, 'NavRoute.json')) as f:
+with open(self.currentdir / 'NavRoute.json') as f:
 raw = f.read()
 
 except Exception as e:
@@ -2411,7 +2411,7 @@ class EDLogs(FileSystemEventHandler):
 
 try:
 
-with open(join(self.currentdir, 'FCMaterials.json')) as f:
+with open(self.currentdir / 'FCMaterials.json') as f:
 raw = f.read()
 
 except Exception as e:
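
The journal_newest_filename change above keeps the creation-time ordering (Odyssey and Horizons journals use different timestamp formats in the file name, so a naive name sort is wrong) while dropping os.path.getctime. A standalone sketch of the same pattern, with a hypothetical helper name and file filter, could look like:

    from pathlib import Path

    def newest_journal(journals_dir: str) -> str | None:
        # Candidate Journal*.log files in the directory (hypothetical filter)
        journal_files = [p for p in Path(journals_dir).iterdir()
                         if p.name.startswith('Journal') and p.suffix == '.log']
        if not journal_files:
            return None
        # Pick the newest by creation time, not by name, because the two
        # timestamp formats do not sort consistently as strings
        latest_file = max(journal_files, key=lambda p: p.stat().st_ctime)
        return str(latest_file)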
plug.py | 22

@@ -14,6 +14,7 @@ import operator
 import os
 import sys
 import tkinter as tk
+from pathlib import Path
 from tkinter import ttk
 from typing import Any, Mapping, MutableMapping
 
@@ -47,7 +48,7 @@ last_error = LastError()
 class Plugin:
 """An EDMC plugin."""
 
-def __init__(self, name: str, loadfile: str | None, plugin_logger: logging.Logger | None):  # noqa: CCR001
+def __init__(self, name: str, loadfile: Path | None, plugin_logger: logging.Logger | None):  # noqa: CCR001
 """
 Load a single plugin.
 
@@ -73,7 +74,7 @@ class Plugin:
 sys.modules[module.__name__] = module
 spec.loader.exec_module(module)
 if getattr(module, 'plugin_start3', None):
-newname = module.plugin_start3(os.path.dirname(loadfile))
+newname = module.plugin_start3(Path(loadfile).resolve().parent)
 self.name = str(newname) if newname else self.name
 self.module = module
 elif getattr(module, 'plugin_start', None):
@@ -171,7 +172,9 @@ def _load_internal_plugins():
 for name in sorted(os.listdir(config.internal_plugin_dir_path)):
 if name.endswith('.py') and name[0] not in ('.', '_'):
 try:
-plugin = Plugin(name[:-3], os.path.join(config.internal_plugin_dir_path, name), logger)
+plugin_name = name[:-3]
+plugin_path = config.internal_plugin_dir_path / name
+plugin = Plugin(plugin_name, plugin_path, logger)
 plugin.folder = None
 internal.append(plugin)
 except Exception:
@@ -186,9 +189,12 @@ def _load_found_plugins():
 # The intent here is to e.g. have EDMC-Overlay load before any plugins
 # that depend on it.
 
-for name in sorted(os.listdir(config.plugin_dir_path), key=lambda n: (
+plugin_files = sorted(config.plugin_dir_path.iterdir(), key=lambda p: (
-not os.path.isfile(os.path.join(config.plugin_dir_path, n, '__init__.py')), n.lower())):
+not (p / '__init__.py').is_file(), p.name.lower()))
-if not os.path.isdir(os.path.join(config.plugin_dir_path, name)) or name[0] in ('.', '_'):
+
+for plugin_file in plugin_files:
+name = plugin_file.name
+if not (config.plugin_dir_path / name).is_dir() or name.startswith(('.', '_')):
 pass
 elif name.endswith('.disabled'):
 name, discard = name.rsplit('.', 1)
@@ -196,12 +202,12 @@ def _load_found_plugins():
 else:
 try:
 # Add plugin's folder to load path in case plugin has internal package dependencies
-sys.path.append(os.path.join(config.plugin_dir_path, name))
+sys.path.append(str(config.plugin_dir_path / name))
 
 import EDMCLogging
 # Create a logger for this 'found' plugin. Must be before the load.py is loaded.
 plugin_logger = EDMCLogging.get_plugin_logger(name)
-found.append(Plugin(name, os.path.join(config.plugin_dir_path, name, 'load.py'), plugin_logger))
+found.append(Plugin(name, config.plugin_dir_path / name / 'load.py', plugin_logger))
 except Exception:
 PLUGINS_broken.append(Plugin(name, None, logger))
 logger.exception(f'Failure loading found Plugin "{name}"')
prefs.py | 22

@@ -4,13 +4,13 @@ from __future__ import annotations
 
 import contextlib
 import logging
-import pathlib
+from os.path import expandvars
+from pathlib import Path
 import subprocess
 import sys
 import tkinter as tk
 import warnings
 from os import system
-from os.path import expanduser, expandvars, join, normpath
 from tkinter import colorchooser as tkColorChooser  # type: ignore # noqa: N812
 from tkinter import ttk
 from types import TracebackType
@@ -41,10 +41,10 @@ def help_open_log_folder() -> None:
 """Open the folder logs are stored in."""
 warnings.warn('prefs.help_open_log_folder is deprecated, use open_log_folder instead. '
 'This function will be removed in 6.0 or later', DeprecationWarning, stacklevel=2)
-open_folder(pathlib.Path(config.app_dir_path / 'logs'))
+open_folder(Path(config.app_dir_path / 'logs'))
 
 
-def open_folder(file: pathlib.Path) -> None:
+def open_folder(file: Path) -> None:
 """Open the given file in the OS file explorer."""
 if sys.platform.startswith('win'):
 # On Windows, use the "start" command to open the folder
@@ -56,7 +56,7 @@ def open_folder(file: pathlib.Path) -> None:
 
 def help_open_system_profiler(parent) -> None:
 """Open the EDMC System Profiler."""
-profiler_path = pathlib.Path(config.respath_path)
+profiler_path = config.respath_path
 try:
 if getattr(sys, 'frozen', False):
 profiler_path /= 'EDMCSystemProfiler.exe'
@@ -322,7 +322,7 @@ class PreferencesDialog(tk.Toplevel):
 self.geometry(f"+{position.left}+{position.top}")
 
 # Set Log Directory
-self.logfile_loc = pathlib.Path(config.app_dir_path / 'logs')
+self.logfile_loc = Path(config.app_dir_path / 'logs')
 
 # Set minimum size to prevent content cut-off
 self.update_idletasks()  # Update "requested size" from geometry manager
@@ -1082,7 +1082,7 @@ class PreferencesDialog(tk.Toplevel):
 import tkinter.filedialog
 directory = tkinter.filedialog.askdirectory(
 parent=self,
-initialdir=expanduser(pathvar.get()),
+initialdir=Path(pathvar.get()).expanduser(),
 title=title,
 mustexist=tk.TRUE
 )
@@ -1104,7 +1104,7 @@ class PreferencesDialog(tk.Toplevel):
 if sys.platform == 'win32':
 start = len(config.home.split('\\')) if pathvar.get().lower().startswith(config.home.lower()) else 0
 display = []
-components = normpath(pathvar.get()).split('\\')
+components = Path(pathvar.get()).resolve().parts
 buf = ctypes.create_unicode_buffer(MAX_PATH)
 pidsRes = ctypes.c_int()  # noqa: N806 # Windows convention
 for i in range(start, len(components)):
@@ -1256,7 +1256,7 @@ class PreferencesDialog(tk.Toplevel):
 
 config.set(
 'outdir',
-join(config.home_path, self.outdir.get()[2:]) if self.outdir.get().startswith('~') else self.outdir.get()
+str(config.home_path / self.outdir.get()[2:]) if self.outdir.get().startswith('~') else self.outdir.get()
 )
 
 logdir = self.logdir.get()
@@ -1299,8 +1299,8 @@ class PreferencesDialog(tk.Toplevel):
 if self.plugdir.get() != config.get('plugin_dir'):
 config.set(
 'plugin_dir',
-join(config.home_path, self.plugdir.get()[2:]) if self.plugdir.get().startswith(
+str(Path(config.home_path, self.plugdir.get()[2:])) if self.plugdir.get().startswith('~') else
-'~') else self.plugdir.get()
+str(Path(self.plugdir.get()))
 )
 self.req_restart = True
 
theme.py | 4

@@ -13,7 +13,6 @@ from __future__ import annotations
 import os
 import sys
 import tkinter as tk
-from os.path import join
 from tkinter import font as tk_font
 from tkinter import ttk
 from typing import Callable
@@ -38,7 +37,8 @@ if sys.platform == 'win32':
 AddFontResourceEx.restypes = [LPCWSTR, DWORD, LPCVOID]  # type: ignore
 FR_PRIVATE = 0x10
 FR_NOT_ENUM = 0x20
-AddFontResourceEx(join(config.respath, 'EUROCAPS.TTF'), FR_PRIVATE, 0)
+font_path = config.respath_path / 'EUROCAPS.TTF'
+AddFontResourceEx(str(font_path), FR_PRIVATE, 0)
 
 elif sys.platform == 'linux':
 # pyright: reportUnboundVariable=false
@@ -28,7 +28,6 @@ from tkinter import font as tk_font
 from tkinter import ttk
 from typing import Any
 import plug
-from os import path
 from config import config, logger
 from l10n import translations as tr
 from monitor import monitor
@@ -96,7 +95,7 @@ class HyperlinkLabel(tk.Label or ttk.Label):  # type: ignore
 else:
 # Avoid file length limits if possible
 target = plug.invoke(url, 'EDSY', 'shipyard_url', loadout, monitor.is_beta)
-file_name = path.join(config.app_dir_path, "last_shipyard.html")
+file_name = config.app_dir_path / "last_shipyard.html"
 
 with open(file_name, 'w') as f:
 f.write(SHIPYARD_HTML_TEMPLATE.format(