mirror of https://github.com/inventree/InvenTree (synced 2024-08-30 18:33:04 +00:00)

Add more checks

This commit is contained in:
parent c26d73036d
commit 3b9d485403

.github/scripts/check_js_templates.py (vendored) | 4
@@ -71,7 +71,7 @@ def check_prohibited_tags(data):
for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
print(f"Checking file 'translated/{os.path.basename(filename)}':")
- with open(filename, 'r') as js_file:
+ with open(filename) as js_file:
data = js_file.readlines()
errors += check_invalid_tag(data)

@@ -81,7 +81,7 @@ for filename in pathlib.Path(js_dynamic_dir).rglob('*.js'):
print(f"Checking file 'dynamic/{os.path.basename(filename)}':")
# Check that the 'dynamic' files do not contains any translated strings
- with open(filename, 'r') as js_file:
+ with open(filename) as js_file:
data = js_file.readlines()
invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']

.github/scripts/check_migration_files.py (vendored) | 4

@@ -20,9 +20,9 @@ for line in str(out.decode()).split('\n'):
if len(migrations) == 0:
sys.exit(0)
- print('There are {n} unstaged migration files:'.format(n=len(migrations)))
+ print(f'There are {len(migrations)} unstaged migration files:')
for m in migrations:
- print(' - {m}'.format(m=m))
+ print(f' - {m}')
sys.exit(len(migrations))
.github/scripts/version_check.py (vendored) | 4

@@ -89,7 +89,7 @@ def check_version_number(version_string, allow_duplicate=False):
if release > version_tuple:
highest_release = False
- print(f'Found newer release: {str(release)}')
+ print(f'Found newer release: {release!s}')
return highest_release

@@ -134,7 +134,7 @@ if __name__ == '__main__':
version = None
- with open(version_file, 'r') as f:
+ with open(version_file) as f:
text = f.read()
# Extract the InvenTree software version
@@ -10,7 +10,7 @@ tld = os.path.abspath(os.path.join(here, '..'))
config_file = os.path.join(tld, 'mkdocs.yml')
- with open(config_file, 'r') as f:
+ with open(config_file) as f:
data = yaml.load(f, yaml.BaseLoader)
assert data['strict'] == 'true'

@@ -57,7 +57,7 @@ def fetch_rtd_versions():
versions = sorted(versions, key=lambda x: StrictVersion(x['version']), reverse=True)
# Add "latest" version first
- if not any((x['title'] == 'latest' for x in versions)):
+ if not any(x['title'] == 'latest' for x in versions):
versions.insert(
0,
{

@@ -70,7 +70,7 @@ def fetch_rtd_versions():
# Ensure we have the 'latest' version
current_version = os.environ.get('READTHEDOCS_VERSION', None)
- if current_version and not any((x['title'] == current_version for x in versions)):
+ if current_version and not any(x['title'] == current_version for x in versions):
versions.append({
'version': current_version,
'title': current_version,

@@ -46,7 +46,7 @@ def top_level_path(path: str) -> str:
key = path.split('/')[1]
- if key in SPECIAL_PATHS.keys():
+ if key in SPECIAL_PATHS:
return key
return GENERAL_PATH

@@ -173,7 +173,7 @@ def parse_api_file(filename: str):
The intent is to make the API schema easier to peruse on the documentation.
"""
- with open(filename, 'r') as f:
+ with open(filename) as f:
data = yaml.safe_load(f)
paths = data['paths']
docs/main.py | 14

@@ -16,7 +16,7 @@ global USER_SETTINGS
here = os.path.dirname(__file__)
settings_file = os.path.join(here, 'inventree_settings.json')
- with open(settings_file, 'r') as sf:
+ with open(settings_file) as sf:
settings = json.load(sf)
GLOBAL_SETTINGS = settings['global']

@@ -27,7 +27,7 @@ def get_repo_url(raw=False):
"""Return the repository URL for the current project."""
mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')
- with open(mkdocs_yml, 'r') as f:
+ with open(mkdocs_yml) as f:
mkdocs_config = yaml.safe_load(f)
repo_name = mkdocs_config['repo_name']

@@ -47,7 +47,7 @@ def check_link(url) -> bool:
# Keep a local cache file of URLs we have already checked
if os.path.exists(CACHE_FILE):
- with open(CACHE_FILE, 'r') as f:
+ with open(CACHE_FILE) as f:
cache = f.read().splitlines()
if url in cache:

@@ -177,7 +177,7 @@ def define_env(env):
assert subprocess.call(command, shell=True) == 0
- with open(output, 'r') as f:
+ with open(output) as f:
content = f.read()
return content

@@ -214,7 +214,7 @@ def define_env(env):
if not os.path.exists(path):
raise FileNotFoundError(f'Required file {path} does not exist.')
- with open(path, 'r') as f:
+ with open(path) as f:
content = f.read()
data = f'??? abstract "{title}"\n\n'

@@ -240,8 +240,8 @@ def define_env(env):
"""Render a provided setting object into a table row."""
name = setting['name']
description = setting['description']
- default = setting.get('default', None)
- units = setting.get('units', None)
+ default = setting.get('default')
+ units = setting.get('units')
return f'| {name} | {description} | {default if default is not None else ""} | {units if units is not None else ""} |'
@@ -20,13 +20,15 @@ src = ["src/backend/InvenTree"]
"__init__.py" = ["D104"]

[tool.ruff.lint]
- select = ["A", "B", "C4", "D", "I", "N", "F"]
+ select = ["A", "B", "C", "C4", "D", "F", "I", "N", "PIE", "UP", "W"]
# Things that should be enabled in the future:
# - LOG
# - DJ # for Django stuff
# - S # for security stuff (bandit)

ignore = [
"C901",
# - C901 - function is too complex
"N999",
# - N802 - function name should be lowercase
"N802",
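Note: the expanded select list above enables additional Ruff rule families, notably UP (pyupgrade) and PIE (flake8-pie). That appears to be what drives the mechanical edits in the rest of this commit: dropping the redundant 'r' mode from open(), converting %-formatting and str.format() calls to f-strings, and deleting placeholder pass / ... bodies under docstrings. A minimal illustrative sketch of the enforced style follows; it is a hypothetical example, not code from this repository:

# Illustrative sketch only (hypothetical file, not part of InvenTree).
from pathlib import Path

def count_lines(filename: str) -> int:
    """Count the lines in a text file."""
    # UP015: 'r' is the default mode, so open(filename, 'r') becomes open(filename).
    with open(filename) as f:
        lines = f.readlines()
    # UP032: prefer an f-string over '{}: {} lines'.format(...).
    print(f'{Path(filename).name}: {len(lines)} lines')
    return len(lines)

if __name__ == '__main__':
    count_lines(__file__)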
@@ -321,7 +321,6 @@ class BulkDeleteMixin:
Raises:
ValidationError: If the deletion should not proceed
"""
- pass

def filter_delete_queryset(self, queryset, request):
"""Provide custom filtering for the queryset *before* it is deleted.

@@ -398,8 +397,6 @@ class BulkDeleteMixin:
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create."""
- ...

class APISearchViewSerializer(serializers.Serializer):
"""Serializer for the APISearchView."""

@@ -125,7 +125,7 @@ class InvenTreeConfig(AppConfig):
for task in tasks:
ref_name = f'{task.func.__module__}.{task.func.__name__}'
- if ref_name in existing_tasks.keys():
+ if ref_name in existing_tasks:
# This task already exists - update the details if required
existing_task = existing_tasks[ref_name]

@@ -131,7 +131,7 @@ def load_config_data(set_cache: bool = False) -> map:
cfg_file = get_config_file()
- with open(cfg_file, 'r') as cfg:
+ with open(cfg_file) as cfg:
data = yaml.safe_load(cfg)
# Set the cache if requested
@@ -1,5 +1,3 @@
- # -*- coding: utf-8 -*-

"""Provides extra global data to all templates."""

import InvenTree.email

@@ -95,7 +95,7 @@ def from_engineering_notation(value):
"""
value = str(value).strip()
- pattern = '(\d+)([a-zA-Z]+)(\d+)(.*)'
+ pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'
if match := re.match(pattern, value):
left, prefix, right, suffix = match.groups()

@@ -1,7 +1,6 @@
"""Custom exception handling for the DRF API."""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import sys
@@ -107,7 +107,7 @@ def construct_format_regex(fmt_string: str) -> str:
if name:
# Check if integer values are required
if _fmt.endswith('d'):
- c = '\d'
+ c = r'\d'
else:
c = '.'

@@ -199,7 +199,7 @@ def format_money(
Raises:
ValueError: format string is incorrectly specified
"""
- language = None and translation.get_language() or settings.LANGUAGE_CODE
+ language = (None and translation.get_language()) or settings.LANGUAGE_CODE
locale = Locale.parse(translation.to_locale(language))
if format:
pattern = parse_pattern(format)
@@ -266,9 +266,8 @@ class RegistratonMixin:
raise forms.ValidationError(
_('The provided primary email address is not valid.')
)
- else:
- if split_email[1] == option[1:]:
- return super().clean_email(email)
+ elif split_email[1] == option[1:]:
+ return super().clean_email(email)
logger.info('The provided email domain for %s is not approved', email)
raise forms.ValidationError(_('The provided email domain is not approved.'))
@@ -12,12 +12,12 @@ from pathlib import Path
from typing import TypeVar, Union
from wsgiref.util import FileWrapper

- import django.utils.timezone as timezone
from django.conf import settings
from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.exceptions import FieldError, ValidationError
from django.core.files.storage import Storage, default_storage
from django.http import StreamingHttpResponse
+ from django.utils import timezone
from django.utils.translation import gettext_lazy as _

import pytz

@@ -800,10 +800,10 @@ def remove_non_printable_characters(
if remove_unicode:
# Remove Unicode control characters
if remove_newline:
- cleaned = regex.sub('[^\P{C}]+', '', cleaned)
+ cleaned = regex.sub(r'[^\P{C}]+', '', cleaned)
else:
# Use 'negative-lookahead' to exclude newline character
- cleaned = regex.sub('(?![\x0a])[^\P{C}]+', '', cleaned)
+ cleaned = regex.sub('(?![\x0a])[^\\P{C}]+', '', cleaned)

return cleaned
@@ -130,7 +130,7 @@ def download_image_from_url(remote_url, timeout=2.5):
# Throw an error if anything goes wrong
response.raise_for_status()
except requests.exceptions.ConnectionError as exc:
- raise Exception(_('Connection error') + f': {str(exc)}')
+ raise Exception(_('Connection error') + f': {exc!s}')
except requests.exceptions.Timeout as exc:
raise exc
except requests.exceptions.HTTPError:

@@ -138,7 +138,7 @@ def download_image_from_url(remote_url, timeout=2.5):
_('Server responded with invalid status code') + f': {response.status_code}'
)
except Exception as exc:
- raise Exception(_('Exception occurred') + f': {str(exc)}')
+ raise Exception(_('Exception occurred') + f': {exc!s}')

if response.status_code != 200:
raise Exception(

@@ -110,7 +110,7 @@ class Command(BaseCommand):
# Import icon map file
if kwargs['input_file']:
- with open(kwargs['input_file'], 'r') as f:
+ with open(kwargs['input_file']) as f:
icons = json.load(f)

self.stdout.write(f'Loaded icon map for {len(icons.keys())} icons')

@@ -35,7 +35,7 @@ class Command(BaseCommand):
img_paths.append(x.path)

if len(img_paths) > 0:
- if all((os.path.exists(path) for path in img_paths)):
+ if all(os.path.exists(path) for path in img_paths):
# All images exist - skip further work
return

@@ -35,4 +35,4 @@ class Command(BaseCommand):
mfa_user[0].staticdevice_set.all().delete()
# TOTP tokens
mfa_user[0].totpdevice_set.all().delete()
- print(f'Removed all MFA methods for user {str(mfa_user[0])}')
+ print(f'Removed all MFA methods for user {mfa_user[0]!s}')
@@ -36,7 +36,7 @@ def get_token_from_request(request):
return None

- class AuthRequiredMiddleware(object):
+ class AuthRequiredMiddleware:
"""Check for user to be authenticated."""

def __init__(self, get_response):

@@ -92,23 +92,18 @@ class AuthRequiredMiddleware(object):
# Allow static files to be accessed without auth
# Important for e.g. login page
- if request.path_info.startswith('/static/'):
- authorized = True
- # Unauthorized users can access the login page
- elif request.path_info.startswith('/accounts/'):
- authorized = True
- elif (
- request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
- or request.path_info.startswith('/assets/')
- or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
+ if (
+ request.path_info.startswith('/static/')
+ or request.path_info.startswith('/accounts/')
+ or (
+ request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
+ or request.path_info.startswith('/assets/')
+ or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
+ )
+ or self.check_token(request)
):
authorized = True
- elif self.check_token(request):
- authorized = True

# No authorization was found for the request
if not authorized:
path = request.path_info
@@ -128,14 +128,10 @@ class CreateAPI(CleanMixin, generics.CreateAPIView):
class RetrieveAPI(generics.RetrieveAPIView):
"""View for retrieve API."""
- pass

class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
"""View for retrieve and update API."""
- pass

class CustomDestroyModelMixin:
"""This mixin was created pass the kwargs from the API to the models."""

@@ -1099,4 +1099,4 @@ def after_error_logged(sender, instance: Error, created: bool, **kwargs):
except Exception as exc:
"""We do not want to throw an exception while reporting an exception"""
- logger.error(exc)  # noqa: LOG005
+ logger.error(exc)

@@ -105,8 +105,7 @@ class IsStaffOrReadOnly(permissions.IsAdminUser):
def has_permission(self, request, view):
"""Check if the user is a superuser."""
return bool(
- request.user
- and request.user.is_staff
+ (request.user and request.user.is_staff)
or request.method in permissions.SAFE_METHODS
)
@@ -11,43 +11,37 @@ def isInTestMode():
def isImportingData():
"""Returns True if the database is currently importing (or exporting) data, e.g. 'loaddata' command is performed."""
- return any((x in sys.argv for x in ['flush', 'loaddata', 'dumpdata']))
+ return any(x in sys.argv for x in ['flush', 'loaddata', 'dumpdata'])

def isRunningMigrations():
"""Return True if the database is currently running migrations."""
return any(
- (
- x in sys.argv
- for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
- )
+ x in sys.argv
+ for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
)

def isRebuildingData():
"""Return true if any of the rebuilding commands are being executed."""
return any(
- (
- x in sys.argv
- for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
- )
+ x in sys.argv
+ for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
)

def isRunningBackup():
"""Return true if any of the backup commands are being executed."""
return any(
- (
- x in sys.argv
- for x in [
- 'backup',
- 'restore',
- 'dbbackup',
- 'dbresotore',
- 'mediabackup',
- 'mediarestore',
- ]
- )
+ x in sys.argv
+ for x in [
+ 'backup',
+ 'restore',
+ 'dbbackup',
+ 'dbresotore',
+ 'mediabackup',
+ 'mediarestore',
+ ]
)
@@ -390,8 +390,6 @@ class InvenTreeTaggitSerializer(TaggitSerializer):
class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
"""Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer."""
- pass

class UserSerializer(InvenTreeModelSerializer):
"""Serializer for a User."""

@@ -704,7 +702,6 @@ class DataFileUploadSerializer(serializers.Serializer):
def save(self):
"""Empty overwrite for save."""
- ...

class DataFileExtractSerializer(serializers.Serializer):

@@ -824,7 +821,6 @@ class DataFileExtractSerializer(serializers.Serializer):
def save(self):
"""No "save" action for this serializer."""
- pass

class NotesFieldMixin:
@@ -281,7 +281,7 @@ QUERYCOUNT = {
'MIN_TIME_TO_LOG': 0.1,
'MIN_QUERY_COUNT_TO_LOG': 25,
},
- 'IGNORE_REQUEST_PATTERNS': ['^(?!\/(api)?(plugin)?\/).*'],
+ 'IGNORE_REQUEST_PATTERNS': [r'^(?!\/(api)?(plugin)?\/).*'],
'IGNORE_SQL_PATTERNS': [],
'DISPLAY_DUPLICATES': 1,
'RESPONSE_HEADER': 'X-Django-Query-Count',

@@ -298,7 +298,7 @@ if (
and INVENTREE_ADMIN_ENABLED
and not TESTING
and get_boolean_setting('INVENTREE_DEBUG_SHELL', 'debug_shell', False)
- ):  # noqa
+ ):
try:
import django_admin_shell  # noqa: F401

@@ -1065,7 +1065,7 @@ COOKIE_MODE = (
valid_cookie_modes = {'lax': 'Lax', 'strict': 'Strict', 'none': None, 'null': None}
- if COOKIE_MODE not in valid_cookie_modes.keys():
+ if COOKIE_MODE not in valid_cookie_modes:
logger.error('Invalid cookie samesite mode: %s', COOKIE_MODE)
sys.exit(-1)
@@ -94,20 +94,19 @@ for name, provider in providers.registry.provider_map.items():
urls = []
if len(adapters) == 1:
urls = handle_oauth2(adapter=adapters[0])
+ elif provider.id in legacy:
+ logger.warning(
+ '`%s` is not supported on platform UI. Use `%s` instead.',
+ provider.id,
+ legacy[provider.id],
+ )
+ continue
else:
- if provider.id in legacy:
- logger.warning(
- '`%s` is not supported on platform UI. Use `%s` instead.',
- provider.id,
- legacy[provider.id],
- )
- continue
- else:
- logger.error(
- 'Found handler that is not yet ready for platform UI: `%s`. Open an feature request on GitHub if you need it implemented.',
- provider.id,
- )
- continue
+ logger.error(
+ 'Found handler that is not yet ready for platform UI: `%s`. Open an feature request on GitHub if you need it implemented.',
+ provider.id,
+ )
+ continue
provider_urlpatterns += [path(f'{provider.id}/', include(urls))]
@@ -1,5 +1,4 @@
"""Provides system status functionality checks."""
- # -*- coding: utf-8 -*-

import logging
from datetime import timedelta

@@ -206,7 +206,7 @@ def offload_task(
raise_warning(f"WARNING: '{taskname}' not offloaded - Function not found")
return False
except Exception as exc:
- raise_warning(f"WARNING: '{taskname}' not offloaded due to {str(exc)}")
+ raise_warning(f"WARNING: '{taskname}' not offloaded due to {exc!s}")
log_error('InvenTree.offload_task')
return False
else:

@@ -256,7 +256,7 @@ def offload_task(
_func(*args, **kwargs)
except Exception as exc:
log_error('InvenTree.offload_task')
- raise_warning(f"WARNING: '{taskname}' failed due to {str(exc)}")
+ raise_warning(f"WARNING: '{taskname}' failed due to {exc!s}")
raise exc

# Finally, task either completed successfully or was offloaded
@@ -80,7 +80,7 @@ def do_translate(parser, token):
"""
bits = token.split_contents()
if len(bits) < 2:
- raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
+ raise TemplateSyntaxError(f"'{bits[0]}' takes at least one argument")
message_string = parser.compile_filter(bits[1])
remaining = bits[2:]

@@ -95,7 +95,7 @@ def do_translate(parser, token):
option = remaining.pop(0)
if option in seen:
raise TemplateSyntaxError(
- "The '%s' option was specified more than once." % option
+ f"The '{option}' option was specified more than once."
)
elif option == 'noop':
noop = True

@@ -104,13 +104,12 @@ def do_translate(parser, token):
value = remaining.pop(0)
except IndexError:
raise TemplateSyntaxError(
- "No argument provided to the '%s' tag for the context option."
- % bits[0]
+ f"No argument provided to the '{bits[0]}' tag for the context option."
)
if value in invalid_context:
raise TemplateSyntaxError(
- "Invalid argument '%s' provided to the '%s' tag for the context "
- 'option' % (value, bits[0])
+ f"Invalid argument '{value}' provided to the '{bits[0]}' tag for the context "
+ 'option'
)
message_context = parser.compile_filter(value)
elif option == 'as':

@@ -118,16 +117,15 @@ def do_translate(parser, token):
value = remaining.pop(0)
except IndexError:
raise TemplateSyntaxError(
- "No argument provided to the '%s' tag for the as option." % bits[0]
+ f"No argument provided to the '{bits[0]}' tag for the as option."
)
asvar = value
elif option == 'escape':
escape = True
else:
raise TemplateSyntaxError(
- "Unknown argument for '%s' tag: '%s'. The only options "
+ f"Unknown argument for '{bits[0]}' tag: '{option}'. The only options "
"available are 'noop', 'context' \"xxx\", and 'as VAR'."
- % (bits[0], option)
)
seen.add(option)
@@ -498,7 +498,7 @@ def primitive_to_javascript(primitive):
elif type(primitive) in [int, float]:
return primitive
# Wrap with quotes
- return format_html("'{}'", primitive)  # noqa: P103
+ return format_html("'{}'", primitive)

@register.simple_tag()

@@ -70,11 +70,11 @@ class APITests(InvenTreeAPITestCase):
"""Helper function to use basic auth."""
# Use basic authentication
- authstring = bytes('{u}:{p}'.format(u=self.username, p=self.password), 'ascii')
+ authstring = bytes(f'{self.username}:{self.password}', 'ascii')

# Use "basic" auth by default
auth = b64encode(authstring).decode('ascii')
- self.client.credentials(HTTP_AUTHORIZATION='Basic {auth}'.format(auth=auth))
+ self.client.credentials(HTTP_AUTHORIZATION=f'Basic {auth}')

def tokenAuth(self):
"""Helper function to use token auth."""

@@ -70,7 +70,7 @@ class URLTest(TestCase):
pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'
- with open(input_file, 'r') as f:
+ with open(input_file) as f:
data = f.read()

results = re.findall(pattern, data)
@@ -543,22 +543,22 @@ class FormatTest(TestCase):
def test_currency_formatting(self):
"""Test that currency formatting works correctly for multiple currencies."""
test_data = (
- (Money(3651.285718, 'USD'), 4, True, '$3,651.2857'),  # noqa: E201,E202
- (Money(487587.849178, 'CAD'), 5, True, 'CA$487,587.84918'),  # noqa: E201,E202
- (Money(0.348102, 'EUR'), 1, False, '0.3'),  # noqa: E201,E202
- (Money(0.916530, 'GBP'), 1, True, '£0.9'),  # noqa: E201,E202
- (Money(61.031024, 'JPY'), 3, False, '61.031'),  # noqa: E201,E202
- (Money(49609.694602, 'JPY'), 1, True, '¥49,609.7'),  # noqa: E201,E202
- (Money(155565.264777, 'AUD'), 2, False, '155,565.26'),  # noqa: E201,E202
- (Money(0.820437, 'CNY'), 4, True, 'CN¥0.8204'),  # noqa: E201,E202
- (Money(7587.849178, 'EUR'), 0, True, '€7,588'),  # noqa: E201,E202
- (Money(0.348102, 'GBP'), 3, False, '0.348'),  # noqa: E201,E202
- (Money(0.652923, 'CHF'), 0, True, 'CHF1'),  # noqa: E201,E202
- (Money(0.820437, 'CNY'), 1, True, 'CN¥0.8'),  # noqa: E201,E202
- (Money(98789.5295680, 'CHF'), 0, False, '98,790'),  # noqa: E201,E202
- (Money(0.585787, 'USD'), 1, True, '$0.6'),  # noqa: E201,E202
- (Money(0.690541, 'CAD'), 3, True, 'CA$0.691'),  # noqa: E201,E202
- (Money(427.814104, 'AUD'), 5, True, 'A$427.81410'),  # noqa: E201,E202
+ (Money(3651.285718, 'USD'), 4, True, '$3,651.2857'),
+ (Money(487587.849178, 'CAD'), 5, True, 'CA$487,587.84918'),
+ (Money(0.348102, 'EUR'), 1, False, '0.3'),
+ (Money(0.916530, 'GBP'), 1, True, '£0.9'),
+ (Money(61.031024, 'JPY'), 3, False, '61.031'),
+ (Money(49609.694602, 'JPY'), 1, True, '¥49,609.7'),
+ (Money(155565.264777, 'AUD'), 2, False, '155,565.26'),
+ (Money(0.820437, 'CNY'), 4, True, 'CN¥0.8204'),
+ (Money(7587.849178, 'EUR'), 0, True, '€7,588'),
+ (Money(0.348102, 'GBP'), 3, False, '0.348'),
+ (Money(0.652923, 'CHF'), 0, True, 'CHF1'),
+ (Money(0.820437, 'CNY'), 1, True, 'CN¥0.8'),
+ (Money(98789.5295680, 'CHF'), 0, False, '98,790'),
+ (Money(0.585787, 'USD'), 1, True, '$0.6'),
+ (Money(0.690541, 'CAD'), 3, True, 'CA$0.691'),
+ (Money(427.814104, 'AUD'), 5, True, 'A$427.81410'),
)

with self.settings(LANGUAGE_CODE='en-us'):

@@ -794,7 +794,7 @@ class TestDownloadFile(TestCase):
def test_download(self):
"""Tests for DownloadFile."""
helpers.DownloadFile('hello world', 'out.txt')
- helpers.DownloadFile(bytes(b'hello world'), 'out.bin')
+ helpers.DownloadFile(b'hello world', 'out.bin')

class TestMPTT(TestCase):

@@ -1557,15 +1557,12 @@ class ClassValidationMixinTest(TestCase):
def test(self):
"""Test function."""
- ...

def test1(self):
"""Test function."""
- ...

def test2(self):
"""Test function."""
- ...

required_attributes = ['NAME']
required_overrides = [test, [test1, test2]]

@@ -1573,8 +1570,6 @@ class ClassValidationMixinTest(TestCase):
class InvalidClass:
"""An invalid class that does not inherit from ClassValidationMixin."""
- pass

def test_valid_class(self):
"""Test that a valid class passes the validation."""

@@ -1585,11 +1580,9 @@ class ClassValidationMixinTest(TestCase):
def test(self):
"""Test function."""
- ...

def test2(self):
"""Test function."""
- ...

TestClass.validate()

@@ -1612,7 +1605,6 @@ class ClassValidationMixinTest(TestCase):
def test2(self):
"""Test function."""
- ...

with self.assertRaisesRegex(
NotImplementedError,

@@ -1627,8 +1619,6 @@ class ClassProviderMixinTest(TestCase):
class TestClass(ClassProviderMixin):
"""This class is a dummy class to test the ClassProviderMixin."""
- pass

def test_get_provider_file(self):
"""Test the get_provider_file function."""
self.assertEqual(self.TestClass.get_provider_file(), __file__)
@@ -15,7 +15,7 @@ def reload_translation_stats():
STATS_FILE = settings.BASE_DIR.joinpath('InvenTree/locale_stats.json').absolute()

try:
- with open(STATS_FILE, 'r') as f:
+ with open(STATS_FILE) as f:
_translation_stats = json.load(f)
except Exception:
_translation_stats = None

@@ -246,8 +246,6 @@ class ExchangeRateMixin:
class InvenTreeTestCase(ExchangeRateMixin, UserMixin, TestCase):
"""Testcase with user setup build in."""
- pass

class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
"""Base class for running InvenTree API tests."""

@@ -283,9 +281,7 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
)  # pragma: no cover

if verbose or n >= value:
- msg = '\r\n%s' % json.dumps(
- context.captured_queries, indent=4
- )  # pragma: no cover
+ msg = f'\r\n{json.dumps(context.captured_queries, indent=4)}'  # pragma: no cover
else:
msg = None

@@ -456,7 +456,7 @@ urlpatterns = []
if settings.INVENTREE_ADMIN_ENABLED:
admin_url = settings.INVENTREE_ADMIN_URL

- if settings.ADMIN_SHELL_ENABLE:  # noqa
+ if settings.ADMIN_SHELL_ENABLE:
urlpatterns += [path(f'{admin_url}/shell/', include('django_admin_shell.urls'))]

urlpatterns += [

@@ -96,7 +96,6 @@ def validate_sales_order_reference(value):
def validate_tree_name(value):
"""Placeholder for legacy function used in migrations."""
- ...

def validate_overage(value):
@@ -180,7 +180,7 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
AjaxUpdateView: 'change',
}

- for view_class in permission_map.keys():
+ for view_class in permission_map:
if issubclass(type(self), view_class):
return permission_map[view_class]

@@ -238,7 +238,6 @@ class AjaxMixin(InvenTreeRoleMixin):
Ref: https://docs.djangoproject.com/en/dev/topics/forms/
"""
# Do nothing by default
- pass

def renderJsonResponse(self, request, form=None, data=None, context=None):
"""Render a JSON response based on specific class context.

@@ -578,14 +577,10 @@ class UserSessionOverride:
class CustomSessionDeleteView(UserSessionOverride, SessionDeleteView):
"""Revert to settings after session delete."""
- pass

class CustomSessionDeleteOtherView(UserSessionOverride, SessionDeleteOtherView):
"""Revert to settings after session delete."""
- pass

class CustomLoginView(LoginView):
"""Custom login view that allows login with urlargs."""

@@ -47,7 +47,7 @@ from plugin.models import NotificationUserSetting
from plugin.serializers import NotificationUserSettingSerializer

- class CsrfExemptMixin(object):
+ class CsrfExemptMixin:
"""Exempts the view from CSRF requirements."""

@method_decorator(csrf_exempt)

@@ -55,7 +55,7 @@ def get_icon_packs():
tabler_icons_path = Path(__file__).parent.parent.joinpath(
'InvenTree/static/tabler-icons/icons.json'
)
- with open(tabler_icons_path, 'r') as tabler_icons_file:
+ with open(tabler_icons_path) as tabler_icons_file:
tabler_icons = json.load(tabler_icons_file)

icon_packs = [
@@ -246,7 +246,7 @@ class BaseInvenTreeSetting(models.Model):
If a particular setting is not present, create it with the default value
"""
- cache_key = f'BUILD_DEFAULT_VALUES:{str(cls.__name__)}'
+ cache_key = f'BUILD_DEFAULT_VALUES:{cls.__name__!s}'

try:
if InvenTree.helpers.str2bool(cache.get(cache_key, False)):

@@ -329,7 +329,7 @@ class BaseInvenTreeSetting(models.Model):
- The unique KEY string
- Any key:value kwargs associated with the particular setting type (e.g. user-id)
"""
- key = f'{str(cls.__name__)}:{setting_key}'
+ key = f'{cls.__name__!s}:{setting_key}'

for k, v in kwargs.items():
key += f'_{k}:{v}'

@@ -365,7 +365,7 @@ def trigger_notification(obj, category=None, obj_ref='pk', **kwargs):
obj_ref_value = getattr(obj, 'id', None)
if not obj_ref_value:
raise KeyError(
- f"Could not resolve an object reference for '{str(obj)}' with {obj_ref}, pk, id"
+ f"Could not resolve an object reference for '{obj!s}' with {obj_ref}, pk, id"
)

# Check if we have notified recently...

@@ -432,9 +432,9 @@ def trigger_notification(obj, category=None, obj_ref='pk', **kwargs):
deliver_notification(method, obj, category, target_users, context)
except NotImplementedError as error:
# Allow any single notification method to fail, without failing the others
- logger.error(error)  # noqa: LOG005
+ logger.error(error)
except Exception as error:
- logger.error(error)  # noqa: LOG005
+ logger.error(error)

# Set delivery flag
common.models.NotificationEntry.notify(category, obj_ref_value)
@@ -831,11 +831,9 @@ class PluginSettingsApiTest(PluginMixin, InvenTreeAPITestCase):
def test_invalid_setting_key(self):
"""Test that an invalid setting key returns a 404."""
- ...

def test_uninitialized_setting(self):
"""Test that requesting an uninitialized setting creates the setting."""
- ...

class ErrorReportTest(InvenTreeAPITestCase):

@@ -933,7 +931,7 @@ class WebhookMessageTests(TestCase):
def test_bad_token(self):
"""Test that a wrong token is not working."""
response = self.client.post(
- self.url, content_type=CONTENT_TYPE_JSON, **{'HTTP_TOKEN': '1234567fghj'}
+ self.url, content_type=CONTENT_TYPE_JSON, HTTP_TOKEN='1234567fghj'
)

assert response.status_code == HTTPStatus.FORBIDDEN

@@ -956,7 +954,7 @@ class WebhookMessageTests(TestCase):
self.url,
data="{'this': 123}",
content_type=CONTENT_TYPE_JSON,
- **{'HTTP_TOKEN': str(self.endpoint_def.token)},
+ HTTP_TOKEN=str(self.endpoint_def.token),
)

assert response.status_code == HTTPStatus.NOT_ACCEPTABLE

@@ -1004,7 +1002,7 @@ class WebhookMessageTests(TestCase):
response = self.client.post(
self.url,
content_type=CONTENT_TYPE_JSON,
- **{'HTTP_TOKEN': str('68MXtc/OiXdA5e2Nq9hATEVrZFpLb3Zb0oau7n8s31I=')},
+ HTTP_TOKEN='68MXtc/OiXdA5e2Nq9hATEVrZFpLb3Zb0oau7n8s31I=',
)

assert response.status_code == HTTPStatus.OK

@@ -1019,7 +1017,7 @@ class WebhookMessageTests(TestCase):
self.url,
data={'this': 'is a message'},
content_type=CONTENT_TYPE_JSON,
- **{'HTTP_TOKEN': str(self.endpoint_def.token)},
+ HTTP_TOKEN=str(self.endpoint_def.token),
)

assert response.status_code == HTTPStatus.OK

@@ -312,7 +312,6 @@ class FileManagementFormView(MultiStepFormView):
This method is very specific to the type of data found in the file,
therefore overwrite it in the subclass.
"""
- pass

def get_clean_items(self):
"""Returns dict with all cleaned values."""

@@ -10,4 +10,3 @@ class CompanyConfig(AppConfig):
def ready(self):
"""This function is called whenever the Company app is loaded."""
- pass
@@ -9,4 +9,4 @@ States can be extended with custom options for each InvenTree instance - those o
from .states import StatusCode
from .transition import StateTransitionMixin, TransitionMethod, storage

- __all__ = ['StatusCode', 'storage', 'TransitionMethod', 'StateTransitionMixin']
+ __all__ = ['StateTransitionMixin', 'StatusCode', 'TransitionMethod', 'storage']

@@ -11,7 +11,7 @@ from .states import StatusCode
@register.simple_tag
def status_label(typ: str, key: int, *args, **kwargs):
"""Render a status label."""
- state = {cls.tag(): cls for cls in inheritors(StatusCode)}.get(typ, None)
+ state = {cls.tag(): cls for cls in inheritors(StatusCode)}.get(typ)
if state:
return mark_safe(state.render(key, large=kwargs.get('large', False)))
raise ValueError(f"Unknown status type '{typ}'")

@@ -23,7 +23,6 @@ class GeneralStatus(StatusCode):
def GHI(self):  # This should be ignored
"""A invalid function."""
- ...

class GeneralStateTest(InvenTreeTestCase):

@@ -3,6 +3,6 @@
from django import template

register = template.Library()
- from generic.states.tags import status_label  # noqa: E402
+ from generic.states.tags import status_label

__all__ = [status_label]
@@ -213,8 +213,6 @@ class DataImportExportSerializerMixin(
):
"""Mixin class for adding data import/export functionality to a DRF serializer."""
- pass

class DataExportViewMixin:
"""Mixin class for exporting a dataset via the API.

@@ -215,7 +215,7 @@ class DataImportSession(models.Model):
continue

# A default value exists
- if field in field_defaults and field_defaults[field]:
+ if field_defaults.get(field):
continue

# The field has been mapped to a data column

@@ -36,7 +36,7 @@ def load_data_file(data_file, file_format=None):
try:
data = file_object.read()
- except (IOError, FileNotFoundError):
+ except (OSError, FileNotFoundError):
raise ValidationError(_('Failed to open data file'))

# Excel formats expect binary data

@@ -19,7 +19,7 @@ class ImporterTest(InvenTreeTestCase):
fn = os.path.join(os.path.dirname(__file__), 'test_data', 'companies.csv')

- with open(fn, 'r') as input_file:
+ with open(fn) as input_file:
data = input_file.read()

session = DataImportSession.objects.create(

@@ -61,4 +61,3 @@ class ImporterTest(InvenTreeTestCase):
def test_field_defaults(self):
"""Test default field values."""
- ...
@@ -1,4 +1,4 @@
from machine.machine_type import BaseDriver, BaseMachineType, MachineStatus
from machine.registry import registry

- __all__ = ['registry', 'BaseMachineType', 'BaseDriver', 'MachineStatus']
+ __all__ = ['BaseDriver', 'BaseMachineType', 'MachineStatus', 'registry']

@@ -4,8 +4,8 @@ from machine.machine_types.label_printer import (
)

__all__ = [
- # machine types
- 'LabelPrinterMachine',
# base drivers
'LabelPrinterBaseDriver',
+ # machine types
+ 'LabelPrinterMachine',
]
@@ -84,13 +84,11 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
machine_type,
{
**machine_type,
- **{
- 'slug': 'label-printer',
- 'name': 'Label Printer',
- 'description': 'Directly print labels for various items.',
- 'provider_plugin': None,
- 'is_builtin': True,
- },
+ 'slug': 'label-printer',
+ 'name': 'Label Printer',
+ 'description': 'Directly print labels for various items.',
+ 'provider_plugin': None,
+ 'is_builtin': True,
},
)
self.assertTrue(

@@ -109,15 +107,13 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
driver,
{
**driver,
- **{
- 'slug': 'test-label-printer-api',
- 'name': 'Test label printer',
- 'description': 'This is a test label printer driver for testing.',
- 'provider_plugin': None,
- 'is_builtin': True,
- 'machine_type': 'label-printer',
- 'driver_errors': [],
- },
+ 'slug': 'test-label-printer-api',
+ 'name': 'Test label printer',
+ 'description': 'This is a test label printer driver for testing.',
+ 'provider_plugin': None,
+ 'is_builtin': True,
+ 'machine_type': 'label-printer',
+ 'driver_errors': [],
},
)
self.assertEqual(driver['provider_file'], __file__)

@@ -173,17 +169,15 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
response.data[0],
{
**response.data[0],
- **{
- 'name': 'Test Machine',
- 'machine_type': 'label-printer',
- 'driver': 'test-label-printer-api',
- 'initialized': True,
- 'active': True,
- 'status': 101,
- 'status_model': 'LabelPrinterStatus',
- 'status_text': '',
- 'is_driver_available': True,
- },
+ 'name': 'Test Machine',
+ 'machine_type': 'label-printer',
+ 'driver': 'test-label-printer-api',
+ 'initialized': True,
+ 'active': True,
+ 'status': 101,
+ 'status_model': 'LabelPrinterStatus',
+ 'status_text': '',
+ 'is_driver_available': True,
},
)

@@ -216,9 +210,7 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
reverse('api-machine-detail', kwargs={'pk': pk}),
{'name': 'Updated Machine'},
)
- self.assertEqual(
- response.data, {**response.data, **{'name': 'Updated Machine'}}
- )
+ self.assertEqual(response.data, {**response.data, 'name': 'Updated Machine'})
self.assertEqual(MachineConfig.objects.get(pk=pk).name, 'Updated Machine')

# Delete the machine
@@ -337,8 +337,6 @@ class PurchaseOrderList(PurchaseOrderMixin, DataExportViewMixin, ListCreateAPI):
class PurchaseOrderDetail(PurchaseOrderMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a PurchaseOrder object."""
- pass

class PurchaseOrderContextMixin:
"""Mixin to add purchase order object as serializer context variable."""

@@ -603,8 +601,6 @@ class PurchaseOrderLineItemList(
class PurchaseOrderLineItemDetail(PurchaseOrderLineItemMixin, RetrieveUpdateDestroyAPI):
"""Detail API endpoint for PurchaseOrderLineItem object."""
- pass

class PurchaseOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
"""API endpoint for accessing a list of PurchaseOrderExtraLine objects."""

@@ -746,8 +742,6 @@ class SalesOrderList(SalesOrderMixin, DataExportViewMixin, ListCreateAPI):
class SalesOrderDetail(SalesOrderMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a SalesOrder object."""
- pass

class SalesOrderLineItemFilter(LineItemFilter):
"""Custom filters for SalesOrderLineItemList endpoint."""

@@ -865,8 +859,6 @@ class SalesOrderLineItemList(
class SalesOrderLineItemDetail(SalesOrderLineItemMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a SalesOrderLineItem object."""
- pass

class SalesOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
"""API endpoint for accessing a list of SalesOrderExtraLine objects."""

@@ -1181,8 +1173,6 @@ class ReturnOrderList(ReturnOrderMixin, DataExportViewMixin, ListCreateAPI):
class ReturnOrderDetail(ReturnOrderMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a single ReturnOrder object."""
- pass

class ReturnOrderContextMixin:
"""Simple mixin class to add a ReturnOrder to the serializer context."""

@@ -1310,8 +1300,6 @@ class ReturnOrderLineItemList(
class ReturnOrderLineItemDetail(ReturnOrderLineItemMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a ReturnOrderLineItem object."""
- pass

class ReturnOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
"""API endpoint for accessing a list of ReturnOrderExtraLine objects."""
@@ -294,7 +294,7 @@ class SalesOrderExport(AjaxView):
export_format = request.GET.get('format', 'csv')
- filename = f'{str(order)} - {order.customer.name}.{export_format}'
+ filename = f'{order!s} - {order.customer.name}.{export_format}'
dataset = SalesOrderLineItemResource().export(queryset=order.lines.all())

@@ -321,7 +321,7 @@ class PurchaseOrderExport(AjaxView):
export_format = request.GET.get('format', 'csv')
- filename = f'{str(order)} - {order.supplier.name}.{export_format}'
+ filename = f'{order!s} - {order.supplier.name}.{export_format}'
dataset = PurchaseOrderLineItemResource().export(queryset=order.lines.all())
@@ -466,8 +466,6 @@ class PartTestTemplateMixin:
class PartTestTemplateDetail(PartTestTemplateMixin, RetrieveUpdateDestroyAPI):
"""Detail endpoint for PartTestTemplate model."""
- pass

class PartTestTemplateList(PartTestTemplateMixin, DataExportViewMixin, ListCreateAPI):
"""API endpoint for listing (and creating) a PartTestTemplate."""

@@ -1570,8 +1568,6 @@ class PartParameterTemplateList(
class PartParameterTemplateDetail(PartParameterTemplateMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for accessing the detail view for a PartParameterTemplate object."""
- pass

class PartParameterAPIMixin:
"""Mixin class for PartParameter API endpoints."""

@@ -1663,8 +1659,6 @@ class PartParameterList(PartParameterAPIMixin, DataExportViewMixin, ListCreateAP
class PartParameterDetail(PartParameterAPIMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a single PartParameter object."""
- pass

class PartStocktakeFilter(rest_filters.FilterSet):
"""Custom filter for the PartStocktakeList endpoint."""

@@ -1922,8 +1916,6 @@ class BomList(BomMixin, DataExportViewMixin, ListCreateDestroyAPIView):
class BomDetail(BomMixin, RetrieveUpdateDestroyAPI):
"""API endpoint for detail view of a single BomItem object."""
- pass

class BomImportUpload(CreateAPI):
"""API endpoint for uploading a complete Bill of Materials.
@@ -4328,7 +4328,7 @@ class BomItem(
- allow_variants
"""
# Seed the hash with the ID of this BOM item
- result_hash = hashlib.md5(''.encode())
+ result_hash = hashlib.md5(b'')

# The following components are used to calculate the checksum
components = [

@@ -4422,8 +4422,7 @@ class BomItem(
try:
ovg = float(overage)
- if ovg < 0:
- ovg = 0
+ ovg = max(ovg, 0)

return ovg
except ValueError:

@@ -4435,10 +4434,8 @@ class BomItem(
try:
percent = float(overage) / 100.0
- if percent > 1:
- percent = 1
- if percent < 0:
- percent = 0
+ percent = min(percent, 1)
+ percent = max(percent, 0)

# Must be represented as a decimal
percent = Decimal(percent)
@@ -1979,9 +1979,8 @@ class BomImportExtractSerializer(InvenTree.serializers.DataFileExtractSerializer
if part is None:
row['errors']['part'] = _('No matching part found')
- else:
- if not part.component:
- row['errors']['part'] = _('Part is not designated as a component')
+ elif not part.component:
+ row['errors']['part'] = _('Part is not designated as a component')

# Update the 'part' value in the row
row['part'] = part.pk if part is not None else None

@@ -49,7 +49,7 @@ class BomExportTest(InvenTreeTestCase):
with open(filename, 'wb') as f:
f.write(response.getvalue())

- with open(filename, 'r') as f:
+ with open(filename) as f:
reader = csv.reader(f, delimiter=',')

for line in reader:

@@ -96,7 +96,7 @@ class BomExportTest(InvenTreeTestCase):
f.write(response.getvalue())

# Read the file
- with open(filename, 'r') as f:
+ with open(filename) as f:
reader = csv.reader(f, delimiter=',')

for line in reader:
@@ -419,7 +419,7 @@ class PartParameterTest(InvenTreeAPITestCase):
response = self.get(
url,
- {'ordering': 'parameter_{pk}'.format(pk=template.pk), 'parameters': 'true'},
+ {'ordering': f'parameter_{template.pk}', 'parameters': 'true'},
expected_code=200,
)

@@ -436,10 +436,7 @@ class PartParameterTest(InvenTreeAPITestCase):
# Next, check reverse ordering
response = self.get(
url,
- {
- 'ordering': '-parameter_{pk}'.format(pk=template.pk),
- 'parameters': 'true',
- },
+ {'ordering': f'-parameter_{template.pk}', 'parameters': 'true'},
expected_code=200,
)

@@ -111,7 +111,6 @@ class PartPricingTests(InvenTreeTestCase):
def test_invalid_rate(self):
"""Ensure that conversion behaves properly with missing rates."""
- ...

def test_simple(self):
"""Tests for hard-coded values."""
@@ -414,7 +414,7 @@ class PartDetailFromIPN(PartDetail):
if not self.object:
return HttpResponseRedirect(reverse('part-index'))

- return super(PartDetailFromIPN, self).get(request, *args, **kwargs)
+ return super().get(request, *args, **kwargs)

class PartImageSelect(AjaxUpdateView):

@@ -5,8 +5,8 @@ from .plugin import InvenTreePlugin
from .registry import registry

__all__ = [
- 'registry',
'InvenTreePlugin',
- 'MixinNotImplementedError',
'MixinImplementationError',
+ 'MixinNotImplementedError',
+ 'registry',
]

@@ -594,7 +594,7 @@ class BarcodeSOAllocate(BarcodeView):
raise ValidationError(response)

# If we have sufficient information, we can allocate the stock item
- if all((x is not None for x in [line_item, sales_order, shipment, quantity])):
+ if all(x is not None for x in [line_item, sales_order, shipment, quantity]):
order.models.SalesOrderAllocation.objects.create(
line=line_item, shipment=shipment, item=stock_item, quantity=quantity
)
@@ -471,9 +471,9 @@ class SupplierBarcodeMixin(BarcodeMixin):
# 2. check if it's defined on the part
# 3. check if there's 1 or 0 stock locations defined in InvenTree
# -> assume all stock is going into that location (or no location)
- if location := line_item.destination:
- pass
- elif location := supplier_part.part.get_default_location():
+ if (location := line_item.destination) or (
+ location := supplier_part.part.get_default_location()
+ ):
+ pass
elif StockLocation.objects.count() <= 1:
if not (location := StockLocation.objects.first()):
@@ -33,7 +33,6 @@ class ReportMixin:
request: The request object which initiated the report generation
context: The context dictionary to add to
"""
- pass

def add_label_context(self, label_instance, model_instance, request, context):
"""Add extra context to the provided label instance.

@@ -46,7 +45,6 @@ class ReportMixin:
request: The request object which initiated the label generation
context: The context dictionary to add to
"""
- pass

def report_callback(self, template, instance, report, request):
"""Callback function called after a report is generated.

@@ -59,4 +57,3 @@ class ReportMixin:
The default implementation does nothing.
"""
- pass

@@ -15,8 +15,6 @@ else:
class SettingsKeyType:
"""Dummy class, so that python throws no error."""
- pass

class SettingsMixin:
"""Mixin that enables global settings for the plugin."""

@@ -205,4 +205,3 @@ class ValidationMixin:
Raises:
ValidationError if the proposed parameter value is objectionable
"""
- pass

@@ -251,8 +251,6 @@ class LabelPrintingMixin:
def before_printing(self):
"""Hook method called before printing labels."""
- pass

def after_printing(self):
"""Hook method called after printing labels."""
- pass
@@ -44,14 +44,10 @@ class MixinImplementationError(ValueError):
Mostly raised if constant is missing
"""
- pass

class MixinNotImplementedError(NotImplementedError):
"""Error if necessary mixin function was not overwritten."""
- pass

def log_error(error, reference: str = 'general'):
"""Log an plugin error."""

@@ -220,15 +220,11 @@ def install_plugin(url=None, packagename=None, user=None, version=None):
full_pkg = f'{packagename}@{url}'
else:
full_pkg = url
else:  # pragma: no cover
# using a custom package repositories
# This is only for pypa compliant directory services (all current are tested above)
# and not covered by tests.
- if url:
- install_name.append('-i')
- full_pkg = url
- elif packagename:
- full_pkg = packagename
+ elif url:
+ install_name.append('-i')
+ full_pkg = url
+ elif packagename:
+ full_pkg = packagename

elif packagename:
# use pypi
@@ -1,3 +1,3 @@
from machine import BaseDriver, BaseMachineType, MachineStatus, registry

- __all__ = ['registry', 'BaseDriver', 'BaseMachineType', 'MachineStatus']
+ __all__ = ['BaseDriver', 'BaseMachineType', 'MachineStatus', 'registry']

@@ -1,3 +1,3 @@
"""just re-export the machine types from the plugin InvenTree app."""

- from machine.machine_types import *  # noqa: F403, F401
+ from machine.machine_types import *  # noqa: F403

@@ -20,23 +20,23 @@ from plugin.base.locate.mixins import LocateMixin
__all__ = [
'APICallMixin',
+ 'ActionMixin',
'AppMixin',
+ 'BarcodeMixin',
+ 'BulkNotificationMethod',
'CurrencyExchangeMixin',
'EventMixin',
'IconPackMixin',
'LabelPrintingMixin',
+ 'LocateMixin',
'NavigationMixin',
+ 'PanelMixin',
'ReportMixin',
'ScheduleMixin',
'SettingsContentMixin',
'SettingsMixin',
- 'UrlsMixin',
- 'PanelMixin',
- 'ActionMixin',
- 'BarcodeMixin',
- 'SupplierBarcodeMixin',
- 'LocateMixin',
- 'ValidationMixin',
'SingleNotificationMethod',
- 'BulkNotificationMethod',
+ 'SupplierBarcodeMixin',
+ 'UrlsMixin',
+ 'ValidationMixin',
]
@@ -85,8 +85,6 @@ class ScheduledTaskPluginTests(TestCase):
class NoSchedules(Base):
"""Plugin without schedules."""
- pass

with self.assertRaises(MixinImplementationError):
NoSchedules().register_tasks()

@@ -18,8 +18,9 @@ class SampleTransitionPlugin(InvenTreePlugin):
def transition(current_state, target_state, instance, default_action, **kwargs):  # noqa: N805
"""Example override function for state transition."""
# Only act on ReturnOrders that should be completed
- if not isinstance(instance, ReturnOrder) or not (
- target_state == ReturnOrderStatus.COMPLETE.value
+ if (
+ not isinstance(instance, ReturnOrder)
+ or target_state != ReturnOrderStatus.COMPLETE.value
):
return False

@@ -144,8 +144,6 @@ class PluginConfigInstallSerializer(serializers.Serializer):
class PluginConfigEmptySerializer(serializers.Serializer):
"""Serializer for a PluginConfig."""
- ...

class PluginReloadSerializer(serializers.Serializer):
"""Serializer for remotely forcing plugin registry reload."""

@@ -195,8 +195,7 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
mixin_dict = plg.mixins()
self.assertIn('base', mixin_dict)
self.assertEqual(
- mixin_dict,
- {**mixin_dict, **{'base': {'key': 'base', 'human_name': 'base'}}},
+ mixin_dict, {**mixin_dict, 'base': {'key': 'base', 'human_name': 'base'}}
)

# check reload on save
||||
|
@ -190,12 +190,12 @@ def uploaded_image(
elif width is not None:
# Resize the image, width only
wpercent = width / float(img.size[0])
hsize = int((float(img.size[1]) * float(wpercent)))
hsize = int(float(img.size[1]) * float(wpercent))
img = img.resize((width, hsize))
elif height is not None:
# Resize the image, height only
hpercent = height / float(img.size[1])
wsize = int((float(img.size[0]) * float(hpercent)))
wsize = int(float(img.size[0]) * float(hpercent))
img = img.resize((wsize, height))

# Optionally rotate the image
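The change only drops redundant parentheses; the proportional-resize arithmetic itself looks like this as a standalone Pillow sketch (image size and target width invented):

    # Width-constrained proportional resize with Pillow
    from PIL import Image

    img = Image.new('RGB', (400, 300))                   # placeholder image
    width = 200
    wpercent = width / float(img.size[0])                # 0.5
    hsize = int(float(img.size[1]) * float(wpercent))    # 150
    img = img.resize((width, hsize))
    assert img.size == (200, 150)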
@ -7,7 +7,7 @@ import sys

def calculate_coverage(filename):
"""Calculate translation coverage for a .po file."""
with open(filename, 'r') as f:
with open(filename) as f:
lines = f.readlines()

lines_count = 0

@ -19,7 +19,7 @@ def calculate_coverage(filename):
lines_count += 1

elif line.startswith('msgstr'):
if line.startswith('msgstr ""') or line.startswith("msgstr ''"):
if line.startswith(('msgstr ""', "msgstr ''")):
lines_uncovered += 1
else:
lines_covered += 1
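str.startswith accepts a tuple of prefixes, which is what makes the collapsed condition equivalent; a quick standalone check:

    # A tuple of prefixes behaves like the or-chain it replaces
    line = 'msgstr ""\n'
    assert line.startswith(('msgstr ""', "msgstr ''")) == (
        line.startswith('msgstr ""') or line.startswith("msgstr ''")
    )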
@ -53,7 +53,7 @@ if __name__ == '__main__':

percentages = []

for locale in locales.keys():
for locale in locales:
locale_file = locales[locale]
stats = calculate_coverage(locale_file)
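Iterating a dict yields its keys directly, so `.keys()` is redundant; a standalone check with made-up locale paths:

    # Iterating a dict and iterating dict.keys() give the same keys
    locales = {'de': 'locale/de/django.po', 'fr': 'locale/fr/django.po'}  # illustrative paths
    assert list(locales) == list(locales.keys())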
@ -45,7 +45,7 @@ if __name__ == '__main__':

print('Generating icon list...')
with open(
os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json'), 'r'
os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json')
) as f:
icons = json.load(f)

@ -974,23 +974,17 @@ class StockList(DataExportViewMixin, ListCreateDestroyAPIView):
'The supplier part has a pack size defined, but flag use_pack_size not set'
)
})
else:
if bool(data.get('use_pack_size')):
quantity = data['quantity'] = supplier_part.base_quantity(
quantity
)
elif bool(data.get('use_pack_size')):
quantity = data['quantity'] = supplier_part.base_quantity(quantity)

# Divide purchase price by pack size, to save correct price per stock item
if (
data['purchase_price']
and supplier_part.pack_quantity_native
):
try:
data['purchase_price'] = float(
data['purchase_price']
) / float(supplier_part.pack_quantity_native)
except ValueError:
pass
# Divide purchase price by pack size, to save correct price per stock item
if data['purchase_price'] and supplier_part.pack_quantity_native:
try:
data['purchase_price'] = float(
data['purchase_price']
) / float(supplier_part.pack_quantity_native)
except ValueError:
pass

# Now remove the flag from data, so that it doesn't interfere with saving
# Do this regardless of results above
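The per-unit price logic divides the pack purchase price by the native pack quantity; a standalone sketch of that arithmetic (values invented):

    # Price-per-unit calculation guarded above
    purchase_price = 12.0          # price paid for one pack (invented value)
    pack_quantity_native = '4'     # pack size as stored, may be a string
    try:
        purchase_price = float(purchase_price) / float(pack_quantity_native)
    except ValueError:
        pass                       # leave the price untouched if the pack size is not numeric
    assert purchase_price == 3.0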
@ -1241,8 +1235,6 @@ class StockItemTestResultMixin:
class StockItemTestResultDetail(StockItemTestResultMixin, RetrieveUpdateDestroyAPI):
"""Detail endpoint for StockItemTestResult."""

pass


class StockItemTestResultFilter(rest_filters.FilterSet):
"""API filter for the StockItemTestResult list."""

@ -477,8 +477,7 @@ class StockItem(

serial_int = abs(serial_int)

if serial_int > clip:
serial_int = clip
serial_int = min(serial_int, clip)

self.serial_int = serial_int
return
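`min()` expresses the upper clamp in one line (the later `quantity = max(quantity, 0)` change is the mirror-image lower clamp); a standalone check with an invented clip value:

    # min()/max() reproduce the manual clamps (clip value is illustrative)
    clip = 0x7FFFFFFF
    serial_int = clip + 5
    assert min(abs(serial_int), clip) == clip
    assert max(-3, 0) == 0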
@ -584,7 +583,7 @@ class StockItem(
except (ValueError, StockItem.DoesNotExist):
pass

super(StockItem, self).save(*args, **kwargs)
super().save(*args, **kwargs)

# If user information is provided, and no existing note exists, create one!
if user and self.tracking_info.count() == 0:
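Zero-argument `super()` resolves the enclosing class and instance implicitly, so it is equivalent to the explicit form here and in the later validate_unique and serializer hunks; a minimal standalone sketch:

    # super() and super(Child, self) call the same method
    class Base:
        def save(self):
            return 'saved'

    class Child(Base):
        def save(self):
            return super().save()  # same as super(Child, self).save()

    assert Child().save() == 'saved'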
@ -619,7 +618,7 @@ class StockItem(
If the StockItem is serialized, the same serial number
cannot exist for the same part (or part tree).
"""
super(StockItem, self).validate_unique(exclude)
super().validate_unique(exclude)

# If the serial number is set, make sure it is not a duplicate
if self.serial:

@ -2022,8 +2021,7 @@ class StockItem(
except (InvalidOperation, ValueError):
return

if quantity < 0:
quantity = 0
quantity = max(quantity, 0)

self.quantity = quantity
@ -2208,7 +2206,7 @@ class StockItem(

for key in item_results.keys():
# Results from sub items should not override master ones
if key not in result_map.keys():
if key not in result_map:
result_map[key] = item_results[key]

return result_map

@ -2586,6 +2584,4 @@ class StockItemTestResult(InvenTree.models.InvenTreeMetadataModel):
help_text=_('The timestamp of the test finish'),
)

user = models.ForeignKey(User, on_delete=models.SET_NULL, blank=True, null=True)

date = models.DateTimeField(auto_now_add=True, editable=False)

@ -421,7 +421,7 @@ class StockItemSerializer(
tests = kwargs.pop('tests', False)
path_detail = kwargs.pop('path_detail', False)

super(StockItemSerializer, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

if not part_detail:
self.fields.pop('part_detail', None)

@ -640,7 +640,7 @@ class StockItemListTest(StockAPITestCase):
StockStatus.REJECTED.value: 0,
}

for code in codes.keys():
for code in codes:
num = codes[code]

response = self.get_stock(status=code)

@ -293,8 +293,6 @@ class InvenTreeUserAdmin(UserAdmin):
class OwnerAdmin(admin.ModelAdmin):
"""Custom admin interface for the Owner model."""

pass


admin.site.unregister(Group)
admin.site.register(Group, RoleGroupAdmin)

@ -557,10 +557,8 @@ def update_group_roles(group, debug=False):

permissions_to_add.add(permission_string)

else:
# A forbidden action will be ignored if we have already allowed it
if permission_string not in permissions_to_add:
permissions_to_delete.add(permission_string)
elif permission_string not in permissions_to_add:
permissions_to_delete.add(permission_string)

# Pre-fetch all the RuleSet objects
rulesets = {
25
tasks.py
25
tasks.py
@ -25,10 +25,9 @@ def checkPythonVersion():

valid = True

if sys.version_info.major < REQ_MAJOR:
valid = False

elif sys.version_info.major == REQ_MAJOR and sys.version_info.minor < REQ_MINOR:
if sys.version_info.major < REQ_MAJOR or (
sys.version_info.major == REQ_MAJOR and sys.version_info.minor < REQ_MINOR
):
valid = False

if not valid:
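The combined condition marks any interpreter older than REQ_MAJOR.REQ_MINOR as invalid; a standalone sketch with illustrative version requirements:

    # Combined version check (REQ_* values are illustrative, not the project's actual minimums)
    REQ_MAJOR, REQ_MINOR = 3, 9

    def python_ok(major, minor):
        return not (major < REQ_MAJOR or (major == REQ_MAJOR and minor < REQ_MINOR))

    assert python_ok(3, 11) and not python_ok(3, 8) and not python_ok(2, 7)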
@ -618,7 +617,7 @@ def export_records(
print('Running data post-processing step...')

# Post-process the file, to remove any "permissions" specified for a user or group
with open(tmpfile, 'r') as f_in:
with open(tmpfile) as f_in:
data = json.loads(f_in.read())

data_out = []

@ -684,7 +683,7 @@ def import_records(
# Pre-process the data, to remove any "permissions" specified for a user or group
datafile = f'{target}.data.json'

with open(target, 'r') as f_in:
with open(target) as f_in:
try:
data = json.loads(f_in.read())
except json.JSONDecodeError as exc:

@ -836,7 +835,7 @@ def server(c, address='127.0.0.1:8000'):

Note: This is *not* sufficient for a production installation.
"""
manage(c, 'runserver {address}'.format(address=address), pty=True)
manage(c, f'runserver {address}', pty=True)


@task(pre=[wait])
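The f-string produces exactly the same command string as the .format() call it replaces; a trivial standalone check:

    # f-string and str.format spellings are identical
    address = '127.0.0.1:8000'
    assert f'runserver {address}' == 'runserver {address}'.format(address=address)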
@ -880,16 +879,16 @@ def test_translations(c):

# compile regex
reg = re.compile(
r'[a-zA-Z0-9]{1}'  # match any single letter and number  # noqa: W504
+ r'(?![^{\(\<]*[}\)\>])'  # that is not inside curly brackets, brackets or a tag  # noqa: W504
+ r'(?<![^\%][^\(][)][a-z])'  # that is not a specially formatted variable with singles  # noqa: W504
r'[a-zA-Z0-9]{1}'  # match any single letter and number
+ r'(?![^{\(\<]*[}\)\>])'  # that is not inside curly brackets, brackets or a tag
+ r'(?<![^\%][^\(][)][a-z])'  # that is not a specially formatted variable with singles
+ r'(?![^\\][\n])'  # that is not a newline
)
last_string = ''

# loop through input file lines
with open(file_path, 'rt') as file_org:
with open(new_file_path, 'wt') as file_new:
with open(file_path) as file_org:
with open(new_file_path, 'w') as file_new:
for line in file_org:
if line.startswith('msgstr "'):
# write output -> replace regex matches with x in the read in (multi)string
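Since text mode is the default, `open(path)` is identical to `open(path, 'r')`/`'rt'`, and `'w'` to `'wt'`; a small standalone round-trip (scratch file name invented):

    # Default read mode and 'w' write mode are both text mode
    with open('scratch_demo.txt', 'w') as fh:      # same as mode='wt'
        fh.write('msgstr "demo"\n')
    with open('scratch_demo.txt') as fh:           # same as mode='r' / 'rt'
        assert fh.readline().startswith('msgstr')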
@ -1330,7 +1329,7 @@ def frontend_download(
version_file = localDir().joinpath('VERSION')
if not version_file.exists():
return
from dotenv import dotenv_values  # noqa: WPS433
from dotenv import dotenv_values

content = dotenv_values(version_file)
if (