Mirror of https://github.com/EDCD/EDMarketConnector.git (synced 2025-04-16 09:10:35 +03:00)

Merge pull request #2094 from HullSeals/enhancement/2051/docs-workflows-utils

[2051] Utilities, Scripts, Workflow Audits LGTM

Commit 27dba61ba0

.github/workflows/windows-build.yml (vendored): 15 lines changed
@@ -103,18 +103,11 @@ jobs:
           pip install wheel
           pip install -r requirements-dev.txt

-      - name: Download latest WinSparkle release
+      - name: Download winsparkle
        run: |
-          $url = "https://api.github.com/repos/vslavik/winsparkle/releases/latest"
-          $response = Invoke-RestMethod -Uri $url
-          $latestAsset = $response.assets | Where-Object { $_.name -match "WinSparkle.*\.zip" -and $_.name -notmatch "-src" }
-
-          $downloadUrl = $latestAsset.browser_download_url
-          Invoke-WebRequest -Uri $downloadUrl -OutFile WinSparkle-Latest.zip
-
-          Expand-Archive -Path WinSparkle-Latest.zip -DestinationPath .
-          $extractedFolder = Get-ChildItem -Filter "WinSparkle-*" -Directory
-          Move-Item -Path "$($extractedFolder.FullName)\Release\*" -Destination .
+          Invoke-Webrequest -UseBasicParsing https://github.com/vslavik/winsparkle/releases/download/v0.8.0/WinSparkle-0.8.0.zip -OutFile out.zip
+          Expand-Archive out.zip
+          Move-Item 'out\WinSparkle-0.8.0\Release\*' '.\'

      - name: Build EDMC
        run: |
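Note: the "Download latest WinSparkle release" step shown in this hunk resolves the newest WinSparkle binary zip through the public GitHub releases API. For illustration only (not part of the workflow), the same lookup written in Python, assuming network access:

    import json
    import urllib.request

    # Ask the GitHub API for the latest WinSparkle release and pick the binary
    # zip asset, skipping the "-src" source archive.
    url = 'https://api.github.com/repos/vslavik/winsparkle/releases/latest'
    with urllib.request.urlopen(url) as response:
        release = json.load(response)

    asset = next(
        a for a in release['assets']
        if a['name'].startswith('WinSparkle') and a['name'].endswith('.zip') and '-src' not in a['name']
    )
    print(asset['browser_download_url'])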
@@ -79,7 +79,7 @@ class ClickCounter:
        """
        # You need to cast to `int` here to store *as* an `int`, so that
        # `config.get_int()` will work for re-loading the value.
-        config.set('click_counter_count', int(self.click_count.get()))  # type: ignore
+        config.set('click_counter_count', int(self.click_count.get()))

    def setup_main_ui(self, parent: tk.Frame) -> tk.Frame:
        """
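Note on the comment kept above this call: tk.StringVar.get() always returns a str, and the value can only be re-read with config.get_int() if it was stored as an int, hence the explicit cast. A minimal sketch of that round trip, using a hypothetical in-memory stand-in for EDMC's config object (illustration only):

    class FakeConfig:
        """Hypothetical stand-in for EDMC's config, for illustration only."""

        def __init__(self) -> None:
            self._store: dict = {}

        def set(self, key, value) -> None:
            self._store[key] = value

        def get_int(self, key, default: int = 0) -> int:
            value = self._store.get(key, default)
            return value if isinstance(value, int) else default


    config = FakeConfig()
    click_count = '5'  # tk.StringVar.get() would hand back a str like this
    config.set('click_counter_count', int(click_count))  # cast so the value is stored *as* an int
    print(config.get_int('click_counter_count'))  # 5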
@@ -95,7 +95,7 @@ class ClickCounter:
        button = tk.Button(
            frame,
            text="Count me",
-            command=lambda: self.click_count.set(str(int(self.click_count.get()) + 1))  # type: ignore
+            command=lambda: self.click_count.set(str(int(self.click_count.get()) + 1))
        )
        button.grid(row=current_row)
        current_row += 1
@@ -44,7 +44,7 @@ class This:
 this = This()


-class PluginTest(object):
+class PluginTest:
     """Class that performs actual tests on bundled modules."""

     def __init__(self, directory: str):
@@ -83,7 +83,6 @@ class PluginTest(object):
         logger.debug(f'timestamp = "{timestamp}", cmdr = "{cmdrname}", system = "{system}", station = "{station}", event = "{event}"')  # noqa: E501
         self.sqlc.execute('INSERT INTO entries VALUES(?, ?, ?, ?, ?)', (timestamp, cmdrname, system, station, event))
         self.sqlconn.commit()
-        return None


 def plugin_start3(plugin_dir: str) -> str:
@@ -309,7 +309,7 @@ def plugin_prefs(parent: ttk.Notebook, cmdr: str | None, is_beta: bool) -> tk.Fr
     this.log = tk.IntVar(value=config.get_int('edsm_out') and 1)
     this.log_button = nb.Checkbutton(
         frame,
-        text=_('Send flight log and Cmdr status to EDSM'),
+        text=_('Send flight log and CMDR status to EDSM'),  # LANG: Send flight log and CMDR Status to EDSM
         variable=this.log,
         command=prefsvarchanged
     )
@@ -320,7 +320,7 @@ def plugin_prefs(parent: ttk.Notebook, cmdr: str | None, is_beta: bool) -> tk.Fr

     this.label = HyperlinkLabel(
         frame,
-        text=_('Elite Dangerous Star Map credentials'),
+        text=_('Elite Dangerous Star Map credentials'),  # LANG: Elite Dangerous Star Map credentials
         background=nb.Label().cget('background'),
         url='https://www.edsm.net/settings/api',
         underline=True
@@ -18,24 +18,24 @@ flake8-noqa==1.3.2
 flake8-polyfill==1.0.2
 flake8-use-fstring==1.4

-mypy==1.6.1
+mypy==1.7.0
 pep8-naming==0.13.3
 safety==2.3.5
-types-requests==2.31.0.2
+types-requests==2.31.0.10
 types-pkg-resources==0.1.3

 # Code formatting tools
 autopep8==2.0.4

 # Git pre-commit checking
-pre-commit==3.3.3
+pre-commit==3.5.0

 # HTML changelogs
-grip==4.6.1
+grip==4.6.2

 # Packaging
 # We only need py2exe on windows.
-py2exe==0.13.0.0; sys_platform == 'win32'
+py2exe==0.13.0.1; sys_platform == 'win32'

 # Testing
 pytest==7.4.3
@@ -1,4 +1,6 @@
 """Search all given paths recursively for localised string calls."""
+from __future__ import annotations
+
 import argparse
 import ast
 import dataclasses
@@ -6,9 +8,6 @@ import json
 import pathlib
 import re
 import sys
-from typing import Optional

 # spell-checker: words dedupe deduping deduped


 def get_func_name(thing: ast.AST) -> str:
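Note: the added `from __future__ import annotations` keeps annotations unevaluated at runtime, which is what lets the rest of this file use the `X | None` spelling instead of `Optional[X]` even on Python versions older than 3.10. A small, self-contained illustration (not from the script):

    from __future__ import annotations


    def first_comment(comments: list[str | None]) -> str | None:
        """Return the first non-empty comment, or None."""
        for comment in comments:
            if comment:
                return comment
        return None


    print(first_comment([None, '# LANG: Example comment']))  # '# LANG: Example comment'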
@@ -16,11 +15,9 @@ def get_func_name(thing: ast.AST) -> str:
     if isinstance(thing, ast.Name):
         return thing.id

-    elif isinstance(thing, ast.Attribute):
+    if isinstance(thing, ast.Attribute):
         return get_func_name(thing.value)

-    else:
-        return ''
+    return ''


 def get_arg(call: ast.Call) -> str:
@@ -31,10 +28,9 @@ def get_arg(call: ast.Call) -> str:
     arg = call.args[0]
     if isinstance(arg, ast.Constant):
         return arg.value
-    elif isinstance(arg, ast.Name):
+    if isinstance(arg, ast.Name):
         return f'VARIABLE! CHECK CODE! {arg.id}'
-    else:
-        return f'Unknown! {type(arg)=} {ast.dump(arg)} ||| {ast.unparse(arg)}'
+    return f'Unknown! {type(arg)=} {ast.dump(arg)} ||| {ast.unparse(arg)}'


 def find_calls_in_stmt(statement: ast.AST) -> list[ast.Call]:
@@ -43,9 +39,7 @@ def find_calls_in_stmt(statement: ast.AST) -> list[ast.Call]:
     for n in ast.iter_child_nodes(statement):
         out.extend(find_calls_in_stmt(n))
     if isinstance(statement, ast.Call) and get_func_name(statement.func) == '_':
-
         out.append(statement)
-
     return out
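Note: together, get_func_name(), get_arg() and find_calls_in_stmt() implement one pattern: parse a source file with ast, walk the tree, and collect the literal argument of every call to the translation function `_`. A simplified, self-contained sketch of that idea (not the script itself):

    import ast

    SOURCE = '''
    greeting = _('Hello, Commander')
    status = _("Send flight log and CMDR status to EDSM")
    other = print('not a translation')
    '''


    def localised_strings(source: str) -> list:
        """Collect the literal first argument of every `_()` call in `source`."""
        found = []
        for node in ast.walk(ast.parse(source)):
            if (
                isinstance(node, ast.Call)
                and isinstance(node.func, ast.Name)
                and node.func.id == '_'
                and node.args
                and isinstance(node.args[0], ast.Constant)
            ):
                found.append(node.args[0].value)
        return found


    print(localised_strings(SOURCE))
    # ['Hello, Commander', 'Send flight log and CMDR status to EDSM']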
@@ -62,7 +56,7 @@ COMMENT_SAME_LINE_RE = re.compile(r'^.*?(#.*)$')
 COMMENT_OWN_LINE_RE = re.compile(r'^\s*?(#.*)$')


-def extract_comments(call: ast.Call, lines: list[str], file: pathlib.Path) -> Optional[str]:  # noqa: CCR001
+def extract_comments(call: ast.Call, lines: list[str], file: pathlib.Path) -> str | None:  # noqa: CCR001
     """
     Extract comments from source code based on the given call.

@@ -74,16 +68,16 @@ def extract_comments(call: ast.Call, lines: list[str], file: pathlib.Path) -> Op
     :param file: The path to the file this call node came from
     :return: The first comment that matches the rules, or None
     """
-    out: Optional[str] = None
+    out: str | None = None
     above = call.lineno - 2
     current = call.lineno - 1

     above_line = lines[above].strip() if len(lines) >= above else None
-    above_comment: Optional[str] = None
+    above_comment: str | None = None
     current_line = lines[current].strip()
-    current_comment: Optional[str] = None
+    current_comment: str | None = None

-    bad_comment: Optional[str] = None
+    bad_comment: str | None = None
     if above_line is not None:
         match = COMMENT_OWN_LINE_RE.match(above_line)
         if match:
@@ -108,16 +102,13 @@
     if current_comment is not None:
         out = current_comment
-
     elif above_comment is not None:
         out = above_comment
-
     elif bad_comment is not None:
         print(bad_comment, file=sys.stderr)
-
     if out is None:
         print(f'No comment for {file}:{call.lineno} {current_line}', file=sys.stderr)

     return out
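Note: COMMENT_SAME_LINE_RE and COMMENT_OWN_LINE_RE, defined earlier in this file, are how a `# LANG:` comment gets associated with a `_()` call: the call's own line is checked first, then the line above it. What each regex captures, for illustration:

    import re

    COMMENT_SAME_LINE_RE = re.compile(r'^.*?(#.*)$')
    COMMENT_OWN_LINE_RE = re.compile(r'^\s*?(#.*)$')

    same = "    text=_('Elite Dangerous Star Map credentials'),  # LANG: Elite Dangerous Star Map credentials"
    own = '    # LANG: Send flight log and CMDR Status to EDSM'

    match = COMMENT_SAME_LINE_RE.match(same)
    print(match.group(1))  # "# LANG: Elite Dangerous Star Map credentials"

    match = COMMENT_OWN_LINE_RE.match(own)
    print(match.group(1))  # "# LANG: Send flight log and CMDR Status to EDSM"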
@@ -146,22 +137,17 @@ def scan_directory(path: pathlib.Path, skip: list[pathlib.Path] | None = None) -
     :param path: path to scan
     :param skip: paths to skip, if any, defaults to None
     """
+    if skip is None:
+        skip = []
     out = {}
     for thing in path.iterdir():
-        if skip is not None and any(s.name == thing.name for s in skip):
+        if any(same_path.name == thing.name for same_path in skip):
             continue

-        if thing.is_file():
-            if not thing.name.endswith('.py'):
-                continue
-
+        if thing.is_file() and thing.suffix == '.py':
             out[thing] = scan_file(thing)

         elif thing.is_dir():
-            out |= scan_directory(thing)
-
-        else:
-            raise ValueError(type(thing), thing)
+            out.update(scan_directory(thing, skip))

     return out
@@ -174,14 +160,13 @@ def parse_template(path) -> set[str]:
     :param path: The path to the lang file
     """
-    lang_re = re.compile(r'\s*"((?:[^"]|(?:\"))+)"\s*=\s*"((?:[^"]|(?:\"))+)"\s*;\s*$')
+    lang_re = re.compile(r'\s*"([^"]+)"\s*=\s*"([^"]+)"\s*;\s*$')
     out = set()
-    for line in pathlib.Path(path).read_text(encoding='utf-8').splitlines():
-        match = lang_re.match(line)
-        if not match:
-            continue
-        if match.group(1) != '!Language':
-            out.add(match.group(1))
+    with open(path, encoding='utf-8') as file:
+        for line in file:
+            match = lang_re.match(line.strip())
+            if match and match.group(1) != '!Language':
+                out.add(match.group(1))

     return out
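Note: the simplified pattern still does the same job: pull the key out of each `"key" = "value";` line of a .strings translation template while skipping the `!Language` metadata entry. A quick check against sample lines, for illustration:

    import re

    lang_re = re.compile(r'\s*"([^"]+)"\s*=\s*"([^"]+)"\s*;\s*$')

    sample = [
        '"!Language" = "English";',
        '"Send flight log and CMDR status to EDSM" = "Send flight log and CMDR status to EDSM";',
        '/* a comment line that should not match */',
    ]

    out = set()
    for line in sample:
        match = lang_re.match(line.strip())
        if match and match.group(1) != '!Language':
            out.add(match.group(1))

    print(out)  # {'Send flight log and CMDR status to EDSM'}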
@@ -193,8 +178,8 @@ class FileLocation:
     path: pathlib.Path
     line_start: int
     line_start_col: int
-    line_end: Optional[int]
-    line_end_col: Optional[int]
+    line_end: int | None
+    line_end_col: int | None

     @staticmethod
     def from_call(path: pathlib.Path, c: ast.Call) -> 'FileLocation':
@@ -213,18 +198,15 @@ class LangEntry:

     locations: list[FileLocation]
     string: str
-    comments: list[Optional[str]]
+    comments: list[str | None]

     def files(self) -> str:
         """Return a string representation of all the files this LangEntry is in, and its location therein."""
-        out = ''
-        for loc in self.locations:
-            start = loc.line_start
-            end = loc.line_end
-            end_str = f':{end}' if end is not None and end != start else ''
-            out += f'{loc.path.name}:{start}{end_str}; '
-
-        return out
+        file_locations = [
+            f"{loc.path.name}:{loc.line_start}:{loc.line_end or ''}"
+            for loc in self.locations
+        ]
+        return "; ".join(file_locations)
def dedupe_lang_entries(entries: list[LangEntry]) -> list[LangEntry]:
|
||||
@ -237,21 +219,17 @@ def dedupe_lang_entries(entries: list[LangEntry]) -> list[LangEntry]:
|
||||
:param entries: The list to deduplicate
|
||||
:return: The deduplicated list
|
||||
"""
|
||||
deduped: list[LangEntry] = []
|
||||
deduped: dict[str, LangEntry] = {}
|
||||
for e in entries:
|
||||
cont = False
|
||||
for d in deduped:
|
||||
if d.string == e.string:
|
||||
cont = True
|
||||
d.locations.append(e.locations[0])
|
||||
d.comments.extend(e.comments)
|
||||
|
||||
if cont:
|
||||
continue
|
||||
|
||||
deduped.append(e)
|
||||
|
||||
return deduped
|
||||
existing = deduped.get(e.string)
|
||||
if existing:
|
||||
existing.locations.extend(e.locations)
|
||||
existing.comments.extend(e.comments)
|
||||
else:
|
||||
deduped[e.string] = LangEntry(
|
||||
locations=e.locations[:], string=e.string, comments=e.comments[:]
|
||||
)
|
||||
return list(deduped.values())
|
||||
|
||||
|
||||
def generate_lang_template(data: dict[pathlib.Path, list[ast.Call]]) -> str:
|
||||
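Note: the rewrite replaces a quadratic rescan of the output list with a single dict keyed on the translated string, merging locations and comments into the first entry seen while keeping first-seen order. The same pattern on plain tuples, for illustration:

    entries = [
        ('Ship', 'plugin_a.py:10'),
        ('Station', 'plugin_a.py:22'),
        ('Ship', 'plugin_b.py:7'),
    ]

    deduped = {}
    for string, location in entries:
        # One dict lookup per entry instead of rescanning the whole output list.
        deduped.setdefault(string, []).append(location)

    print(deduped)
    # {'Ship': ['plugin_a.py:10', 'plugin_b.py:7'], 'Station': ['plugin_a.py:22']}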
@@ -269,23 +247,19 @@ def generate_lang_template(data: dict[pathlib.Path, list[ast.Call]]) -> str:
     print(f'Done Deduping entries {len(entries)=} {len(deduped)=}', file=sys.stderr)
     for entry in deduped:
         assert len(entry.comments) == len(entry.locations)
-        comment = ''
-
+        comment_set = set()
+        for comment, loc in zip(entry.comments, entry.locations):
+            if comment:
+                comment_set.add(f'{loc.path.name}: {comment};')
+
         files = 'In files: ' + entry.files()
+        comment = ' '.join(comment_set).strip()
+
+        header = f'{comment} {files}'.strip()
         string = f'"{entry.string}"'
-
-        for i in range(len(entry.comments)):
-            if entry.comments[i] is None:
-                continue
-
-            loc = entry.locations[i]
-            to_append = f'{loc.path.name}: {entry.comments[i]}; '
-            if to_append not in comment:
-                comment += to_append
-
-        header = f'{comment.strip()} {files}'.strip()
         out += f'/* {header} */\n'
-        out += f'{string} = {string};\n'
-        out += '\n'
+        out += f'{string} = {string};\n\n'

     return out
@@ -116,9 +116,14 @@ if __name__ == '__main__':
     if file_name == '-':
         file = sys.stdin
     else:
-        file = open(file_name)
-
-    res = json.load(file)
-    file.close()
+        try:
+            with open(file_name) as file:
+                res = json.load(file)
+        except FileNotFoundError:
+            print(f"File '{file_name}' not found.")
+            sys.exit(1)
+        except json.JSONDecodeError:
+            print(f"Error decoding JSON in '{file_name}'.")
+            sys.exit(1)

     show_killswitch_set_info(KillSwitchSet(parse_kill_switches(res)))
@@ -1,11 +1,10 @@
 #!/usr/bin/env python
-"""Find the reverse dependencies of a package according to pip."""
+"""Search for dependencies given a package."""
 import sys

 import pkg_resources


-def find_reverse_deps(package_name: str):
+def find_reverse_deps(package_name: str) -> list[str]:
     """
     Find the packages that depend on the named one.

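The body of find_reverse_deps() is outside this hunk; for illustration, a minimal sketch of how such a lookup can be written with pkg_resources (an assumption, not necessarily the script's exact implementation):

    import pkg_resources


    def find_reverse_deps(package_name: str) -> list:
        """Return the names of installed packages that declare a requirement on `package_name`."""
        return sorted(
            dist.project_name
            for dist in pkg_resources.working_set
            if any(req.key == package_name.lower() for req in dist.requires())
        )


    print(find_reverse_deps('requests'))  # e.g. ['grip', 'safety'], depending on the environment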
@@ -19,4 +18,16 @@ def find_reverse_deps(package_name: str):


 if __name__ == '__main__':
-    print(find_reverse_deps(sys.argv[1]))
+    if len(sys.argv) != 2:
+        print("Usage: python reverse_deps.py <package_name>")
+        sys.exit(1)
+
+    package_name = sys.argv[1]
+    reverse_deps = find_reverse_deps(package_name)
+
+    if reverse_deps:
+        print(f"Reverse dependencies of '{package_name}':")
+        for dep in reverse_deps:
+            print(dep)
+    else:
+        print(f"No reverse dependencies found for '{package_name}'.")
@@ -1,4 +1,10 @@
-"""Utilities for dealing with text (and byte representations thereof)."""
+"""
+text.py - Dealing with Text and Bytes.
+
+Copyright (c) EDCD, All Rights Reserved
+Licensed under the GNU General Public License.
+See LICENSE file.
+"""
 from __future__ import annotations

 from gzip import compress
@@ -1,4 +1,10 @@
-"""Utility functions relating to ships."""
+"""
+util_ships.py - Ship Utilities.
+
+Copyright (c) EDCD, All Rights Reserved
+Licensed under the GNU General Public License.
+See LICENSE file.
+"""
 from edmc_data import ship_name_map

@@ -11,6 +17,6 @@ def ship_file_name(ship_name: str, ship_type: str) -> str:
     if name.lower() in ('con', 'prn', 'aux', 'nul',
                         'com0', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
                         'lpt0', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9'):
-        name = name + '_'
+        name += '_'

-    return name.translate({ord(x): u'_' for x in ('\0', '<', '>', ':', '"', '/', '\\', '|', '?', '*')})
+    return name.translate({ord(x): '_' for x in ('\0', '<', '>', ':', '"', '/', '\\', '|', '?', '*')})
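Note: this function is sanitising a ship name for use as a file name on Windows: reserved device names get a trailing underscore and characters that are illegal in file names become underscores. The behaviour in isolation, for illustration only:

    def sanitise(name: str) -> str:
        """Illustrative copy of the sanitising logic, outside EDMC."""
        if name.lower() in ('con', 'prn', 'aux', 'nul',
                            'com0', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
                            'lpt0', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9'):
            name += '_'
        return name.translate({ord(x): '_' for x in ('\0', '<', '>', ':', '"', '/', '\\', '|', '?', '*')})


    print(sanitise('CON'))                   # 'CON_'
    print(sanitise('Krait Mk II: "Scout"'))  # 'Krait Mk II_ _Scout_'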