pylint checks

This commit is contained in:
Matthias Mair 2024-08-20 00:26:32 +02:00
parent c197358ce4
commit 8a163237ac
No known key found for this signature in database
GPG Key ID: A593429DDA23B66A
24 changed files with 54 additions and 46 deletions

View File

@ -71,7 +71,7 @@ def check_prohibited_tags(data):
for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
print(f"Checking file 'translated/{os.path.basename(filename)}':")
with open(filename) as js_file:
with open(filename, encoding='utf-8') as js_file:
data = js_file.readlines()
errors += check_invalid_tag(data)
@ -81,7 +81,7 @@ for filename in pathlib.Path(js_dynamic_dir).rglob('*.js'):
print(f"Checking file 'dynamic/{os.path.basename(filename)}':")
# Check that the 'dynamic' files do not contain any translated strings
with open(filename) as js_file:
with open(filename, encoding='utf-8') as js_file:
data = js_file.readlines()
invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']

View File

@ -134,7 +134,7 @@ if __name__ == '__main__':
version = None
with open(version_file) as f:
with open(version_file, encoding='utf-8') as f:
text = f.read()
# Extract the InvenTree software version
@ -199,7 +199,7 @@ if __name__ == '__main__':
target_repos = [REPO.lower(), f'ghcr.io/{REPO.lower()}']
# Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
with open(os.getenv('GITHUB_ENV'), 'a', encoding='utf-8') as env_file:
# Construct tag string
tag_list = [[f'{r}:{t}' for t in docker_tags] for r in target_repos]
tags = ','.join(itertools.chain(*tag_list))

View File

@ -10,7 +10,7 @@ tld = os.path.abspath(os.path.join(here, '..'))
config_file = os.path.join(tld, 'mkdocs.yml')
with open(config_file) as f:
with open(config_file, encoding='utf-8') as f:
data = yaml.load(f, yaml.BaseLoader)
assert data['strict'] == 'true'

View File

@ -82,7 +82,7 @@ def fetch_rtd_versions():
print('Discovered the following versions:')
print(versions)
with open(output_filename, 'w') as file:
with open(output_filename, 'w', encoding='utf-8') as file:
json.dump(versions, file, indent=2)
@ -100,7 +100,7 @@ def get_release_data():
# Release information has been cached to file
print("Loading release information from 'releases.json'")
with open(json_file) as f:
with open(json_file, encoding='utf-8') as f:
return json.loads(f.read())
# Download release information via the GitHub API
@ -127,7 +127,7 @@ def get_release_data():
page += 1
# Cache these results to file
with open(json_file, 'w') as f:
with open(json_file, 'w', encoding='utf-8') as f:
print("Saving release information to 'releases.json'")
f.write(json.dumps(releases))

View File

@ -73,7 +73,7 @@ def generate_schema_file(key: str) -> None:
print('Writing schema file to:', output_file)
with open(output_file, 'w') as f:
with open(output_file, 'w', encoding='utf-8') as f:
f.write(output)
@ -119,7 +119,7 @@ def generate_index_file(version: str):
print('Writing index file to:', output_file)
with open(output_file, 'w') as f:
with open(output_file, 'w', encoding='utf-8') as f:
f.write(output)
@ -171,7 +171,7 @@ def parse_api_file(filename: str):
The intent is to make the API schema easier to peruse on the documentation.
"""
with open(filename) as f:
with open(filename, encoding='utf-8') as f:
data = yaml.safe_load(f)
paths = data['paths']
@ -211,7 +211,7 @@ def parse_api_file(filename: str):
output_file = os.path.abspath(output_file)
with open(output_file, 'w') as f:
with open(output_file, 'w', encoding='utf-8') as f:
yaml.dump(output, f)
# Generate a markdown file for the schema

View File

@ -16,7 +16,7 @@ global USER_SETTINGS
here = os.path.dirname(__file__)
settings_file = os.path.join(here, 'inventree_settings.json')
with open(settings_file) as sf:
with open(settings_file, encoding='utf-8') as sf:
settings = json.load(sf)
GLOBAL_SETTINGS = settings['global']
@ -27,7 +27,7 @@ def get_repo_url(raw=False):
"""Return the repository URL for the current project."""
mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')
with open(mkdocs_yml) as f:
with open(mkdocs_yml, encoding='utf-8') as f:
mkdocs_config = yaml.safe_load(f)
repo_name = mkdocs_config['repo_name']
@ -47,7 +47,7 @@ def check_link(url) -> bool:
# Keep a local cache file of URLs we have already checked
if os.path.exists(CACHE_FILE):
with open(CACHE_FILE) as f:
with open(CACHE_FILE, encoding='utf-8') as f:
cache = f.read().splitlines()
if url in cache:
@ -59,7 +59,7 @@ def check_link(url) -> bool:
response = requests.head(url, timeout=5000)
if response.status_code == 200:
# Update the cache file
with open(CACHE_FILE, 'a') as f:
with open(CACHE_FILE, 'a', encoding='utf-8') as f:
f.write(f'{url}\n')
return True
@ -177,7 +177,7 @@ def define_env(env):
assert subprocess.call(command, shell=True) == 0
with open(output) as f:
with open(output, encoding='utf-8') as f:
content = f.read()
return content
@ -214,7 +214,7 @@ def define_env(env):
if not os.path.exists(path):
raise FileNotFoundError(f'Required file {path} does not exist.')
with open(path) as f:
with open(path, encoding='utf-8') as f:
content = f.read()
data = f'??? abstract "{title}"\n\n'

View File

@ -20,13 +20,18 @@ src = ["src/backend/InvenTree"]
"__init__.py" = ["D104"]
[tool.ruff.lint]
select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "RUF", "UP", "W"]
select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "PLE", "PLW", "RUF", "UP", "W"]
# Things that should be enabled in the future:
# - LOG
# - DJ # for Django stuff
# - S # for security stuff (bandit)
ignore = [
"PLE1205",
# - PLE1205 - Too many arguments for logging format string
"PLW2901",
# - PLW2901 - Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"PLW0602","PLW0603","PLW0604", # global variable things
"RUF015",
# - RUF015 - Prefer next({iterable}) over single element slice
"RUF012",

View File

@ -131,7 +131,7 @@ def load_config_data(set_cache: bool = False) -> map:
cfg_file = get_config_file()
with open(cfg_file) as cfg:
with open(cfg_file, encoding='utf-8') as cfg:
data = yaml.safe_load(cfg)
# Set the cache if requested

View File

@ -47,7 +47,7 @@ class Command(BaseCommand):
filename = kwargs.get('filename', 'inventree_settings.json')
with open(filename, 'w') as f:
with open(filename, 'w', encoding='utf-8') as f:
json.dump(settings, f, indent=4)
print(f"Exported InvenTree settings definitions to '{filename}'")

View File

@ -103,14 +103,14 @@ class Command(BaseCommand):
})
self.stdout.write(f'Writing icon map for {len(icons.keys())} icons')
with open(kwargs['output_file'], 'w') as f:
with open(kwargs['output_file'], 'w', encoding='utf-8') as f:
json.dump(icons, f, indent=2)
self.stdout.write(f'Icon map written to {kwargs["output_file"]}')
# Import icon map file
if kwargs['input_file']:
with open(kwargs['input_file']) as f:
with open(kwargs['input_file'], encoding='utf-8') as f:
icons = json.load(f)
self.stdout.write(f'Loaded icon map for {len(icons.keys())} icons')

View File

@ -19,7 +19,9 @@ def render_file(file_name, source, target, locales, ctx):
target_file = os.path.join(target, locale + '.' + file_name)
with open(target_file, 'w') as localised_file, lang_over(locale):
with open(target_file, 'w', encoding='utf-8') as localised_file, lang_over(
locale
):
rendered = render_to_string(os.path.join(source, file_name), ctx)
localised_file.write(rendered)

View File

@ -70,7 +70,7 @@ class URLTest(TestCase):
pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'
with open(input_file) as f:
with open(input_file, encoding='utf-8') as f:
data = f.read()
results = re.findall(pattern, data)

View File

@ -15,7 +15,7 @@ def reload_translation_stats():
STATS_FILE = settings.BASE_DIR.joinpath('InvenTree/locale_stats.json').absolute()
try:
with open(STATS_FILE) as f:
with open(STATS_FILE, encoding='utf-8') as f:
_translation_stats = json.load(f)
except Exception:
_translation_stats = None

View File

@ -55,7 +55,7 @@ def get_icon_packs():
tabler_icons_path = Path(__file__).parent.parent.joinpath(
'InvenTree/static/tabler-icons/icons.json'
)
with open(tabler_icons_path) as tabler_icons_file:
with open(tabler_icons_path, encoding='utf-8') as tabler_icons_file:
tabler_icons = json.load(tabler_icons_file)
icon_packs = [

View File

@ -2060,7 +2060,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
'description': _(
'Check that all plugins are installed on startup - enable in container environments'
),
'default': str(os.getenv('INVENTREE_DOCKER', False)).lower()
'default': str(os.getenv('INVENTREE_DOCKER', 'False')).lower()
in ['1', 'true'],
'validator': bool,
'requires_restart': True,

View File

@ -5,4 +5,4 @@ from django import template
register = template.Library()
from generic.states.tags import status_label
__all__ = [status_label]
__all__ = ['status_label']

View File

@ -19,7 +19,7 @@ class ImporterTest(InvenTreeTestCase):
fn = os.path.join(os.path.dirname(__file__), 'test_data', 'companies.csv')
with open(fn) as input_file:
with open(fn, encoding='utf-8') as input_file:
data = input_file.read()
session = DataImportSession.objects.create(

View File

@ -1618,7 +1618,7 @@ class PartDetailTests(PartAPITestBase):
# Try to upload a non-image file
test_path = BASE_DIR / '_testfolder' / 'dummy_image'
with open(f'{test_path}.txt', 'w') as dummy_image:
with open(f'{test_path}.txt', 'w', encoding='utf-8') as dummy_image:
dummy_image.write('hello world')
with open(f'{test_path}.txt', 'rb') as dummy_image:

View File

@ -49,7 +49,7 @@ class BomExportTest(InvenTreeTestCase):
with open(filename, 'wb') as f:
f.write(response.getvalue())
with open(filename) as f:
with open(filename, encoding='utf-8') as f:
reader = csv.reader(f, delimiter=',')
for line in reader:
@ -96,7 +96,7 @@ class BomExportTest(InvenTreeTestCase):
f.write(response.getvalue())
# Read the file
with open(filename) as f:
with open(filename, encoding='utf-8') as f:
reader = csv.reader(f, delimiter=',')
for line in reader:

View File

@ -7,7 +7,7 @@ import sys
def calculate_coverage(filename):
"""Calculate translation coverage for a .po file."""
with open(filename) as f:
with open(filename, encoding='utf-8') as f:
lines = f.readlines()
lines_count = 0
@ -72,7 +72,7 @@ if __name__ == '__main__':
print('-' * 16)
# write locale stats
with open(STAT_FILE, 'w') as target:
with open(STAT_FILE, 'w', encoding='utf-8') as target:
json.dump(locales_perc, target)
avg = int(sum(percentages) / len(percentages)) if len(percentages) > 0 else 0

View File

@ -45,7 +45,8 @@ if __name__ == '__main__':
print('Generating icon list...')
with open(
os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json')
os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json'),
encoding='utf-8',
) as f:
icons = json.load(f)
@ -60,7 +61,7 @@ if __name__ == '__main__':
},
}
with open(os.path.join(STATIC_FOLDER, 'icons.json'), 'w') as f:
with open(os.path.join(STATIC_FOLDER, 'icons.json'), 'w', encoding='utf-8') as f:
json.dump(res, f, separators=(',', ':'))
print('Cleaning up...')

View File

@ -41,7 +41,7 @@ from InvenTree.fields import InvenTreeModelMoneyField, InvenTreeURLField
from order.status_codes import SalesOrderStatusGroups
from part import models as PartModels
from plugin.events import trigger_event
from stock import models as StockModels
from stock import models as StockModels # noqa: PLW0406
from stock.generators import generate_batch_code
from stock.status_codes import StockHistoryCode, StockStatus, StockStatusGroups
from users.models import Owner

View File

@ -38,7 +38,7 @@ class TemplateTagTest(InvenTreeTestCase):
manifest_file = Path(__file__).parent.joinpath('static/web/.vite/manifest.json')
# Try with removed manifest file
manifest_file.rename(manifest_file.with_suffix('.json.bak')) # Rename
resp = resp = spa_helper.spa_bundle()
resp = spa_helper.spa_bundle()
self.assertIsNone(resp)
manifest_file.with_suffix('.json.bak').rename(
manifest_file.with_suffix('.json')

View File

@ -618,7 +618,7 @@ def export_records(
print('Running data post-processing step...')
# Post-process the file, to remove any "permissions" specified for a user or group
with open(tmpfile) as f_in:
with open(tmpfile, encoding='utf-8') as f_in:
data = json.loads(f_in.read())
data_out = []
@ -641,7 +641,7 @@ def export_records(
data_out.append(entry)
# Write the processed data to file
with open(target, 'w') as f_out:
with open(target, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(data_out, indent=2))
print('Data export completed')
@ -684,7 +684,7 @@ def import_records(
# Pre-process the data, to remove any "permissions" specified for a user or group
datafile = f'{target}.data.json'
with open(target) as f_in:
with open(target, encoding='utf-8') as f_in:
try:
data = json.loads(f_in.read())
except json.JSONDecodeError as exc:
@ -714,11 +714,11 @@ def import_records(
print(entry)
# Write the auth file data
with open(authfile, 'w') as f_out:
with open(authfile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(auth_data, indent=2))
# Write the processed data to the tmp file
with open(datafile, 'w') as f_out:
with open(datafile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(load_data, indent=2))
excludes = content_excludes(allow_auth=False)
@ -888,8 +888,8 @@ def test_translations(c):
last_string = ''
# loop through input file lines
with open(file_path) as file_org:
with open(new_file_path, 'w') as file_new:
with open(file_path, encoding='utf-8') as file_org:
with open(new_file_path, 'w', encoding='utf-8') as file_new:
for line in file_org:
if line.startswith('msgstr "'):
# write output -> replace regex matches with x in the read in (multi)string