Mirror of https://github.com/inventree/InvenTree (synced 2024-08-30 18:33:04 +00:00)

Commit: Add more checks
This commit is contained in:
    parent c26d73036d
    commit 3b9d485403
.github/scripts/check_js_templates.py (vendored) | 4

@@ -71,7 +71,7 @@ def check_prohibited_tags(data):
 for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
     print(f"Checking file 'translated/{os.path.basename(filename)}':")

-    with open(filename, 'r') as js_file:
+    with open(filename) as js_file:
         data = js_file.readlines()

     errors += check_invalid_tag(data)
@@ -81,7 +81,7 @@ for filename in pathlib.Path(js_dynamic_dir).rglob('*.js'):
     print(f"Checking file 'dynamic/{os.path.basename(filename)}':")

     # Check that the 'dynamic' files do not contains any translated strings
-    with open(filename, 'r') as js_file:
+    with open(filename) as js_file:
         data = js_file.readlines()

     invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']
.github/scripts/check_migration_files.py (vendored) | 4

@@ -20,9 +20,9 @@ for line in str(out.decode()).split('\n'):
 if len(migrations) == 0:
     sys.exit(0)

-print('There are {n} unstaged migration files:'.format(n=len(migrations)))
+print(f'There are {len(migrations)} unstaged migration files:')

 for m in migrations:
-    print(' - {m}'.format(m=m))
+    print(f' - {m}')

 sys.exit(len(migrations))
.github/scripts/version_check.py (vendored) | 4

@@ -89,7 +89,7 @@ def check_version_number(version_string, allow_duplicate=False):

         if release > version_tuple:
             highest_release = False
-            print(f'Found newer release: {str(release)}')
+            print(f'Found newer release: {release!s}')

     return highest_release

@@ -134,7 +134,7 @@ if __name__ == '__main__':

     version = None

-    with open(version_file, 'r') as f:
+    with open(version_file) as f:
         text = f.read()

     # Extract the InvenTree software version
@@ -10,7 +10,7 @@ tld = os.path.abspath(os.path.join(here, '..'))

 config_file = os.path.join(tld, 'mkdocs.yml')

-with open(config_file, 'r') as f:
+with open(config_file) as f:
     data = yaml.load(f, yaml.BaseLoader)

 assert data['strict'] == 'true'

@@ -57,7 +57,7 @@ def fetch_rtd_versions():
     versions = sorted(versions, key=lambda x: StrictVersion(x['version']), reverse=True)

     # Add "latest" version first
-    if not any((x['title'] == 'latest' for x in versions)):
+    if not any(x['title'] == 'latest' for x in versions):
         versions.insert(
             0,
             {
@@ -70,7 +70,7 @@ def fetch_rtd_versions():
     # Ensure we have the 'latest' version
     current_version = os.environ.get('READTHEDOCS_VERSION', None)

-    if current_version and not any((x['title'] == current_version for x in versions)):
+    if current_version and not any(x['title'] == current_version for x in versions):
         versions.append({
             'version': current_version,
             'title': current_version,

@@ -46,7 +46,7 @@ def top_level_path(path: str) -> str:

     key = path.split('/')[1]

-    if key in SPECIAL_PATHS.keys():
+    if key in SPECIAL_PATHS:
         return key

     return GENERAL_PATH
@@ -173,7 +173,7 @@ def parse_api_file(filename: str):

     The intent is to make the API schema easier to peruse on the documentation.
     """
-    with open(filename, 'r') as f:
+    with open(filename) as f:
         data = yaml.safe_load(f)

     paths = data['paths']
docs/main.py | 14

@@ -16,7 +16,7 @@ global USER_SETTINGS
 here = os.path.dirname(__file__)
 settings_file = os.path.join(here, 'inventree_settings.json')

-with open(settings_file, 'r') as sf:
+with open(settings_file) as sf:
     settings = json.load(sf)

 GLOBAL_SETTINGS = settings['global']
@@ -27,7 +27,7 @@ def get_repo_url(raw=False):
     """Return the repository URL for the current project."""
     mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')

-    with open(mkdocs_yml, 'r') as f:
+    with open(mkdocs_yml) as f:
         mkdocs_config = yaml.safe_load(f)
         repo_name = mkdocs_config['repo_name']

@@ -47,7 +47,7 @@ def check_link(url) -> bool:

     # Keep a local cache file of URLs we have already checked
     if os.path.exists(CACHE_FILE):
-        with open(CACHE_FILE, 'r') as f:
+        with open(CACHE_FILE) as f:
             cache = f.read().splitlines()

             if url in cache:
@@ -177,7 +177,7 @@ def define_env(env):

         assert subprocess.call(command, shell=True) == 0

-        with open(output, 'r') as f:
+        with open(output) as f:
             content = f.read()

         return content
@@ -214,7 +214,7 @@ def define_env(env):
         if not os.path.exists(path):
             raise FileNotFoundError(f'Required file {path} does not exist.')

-        with open(path, 'r') as f:
+        with open(path) as f:
             content = f.read()

         data = f'??? abstract "{title}"\n\n'
@@ -240,8 +240,8 @@ def define_env(env):
         """Render a provided setting object into a table row."""
         name = setting['name']
         description = setting['description']
-        default = setting.get('default', None)
-        units = setting.get('units', None)
+        default = setting.get('default')
+        units = setting.get('units')

         return f'| {name} | {description} | {default if default is not None else ""} | {units if units is not None else ""} |'
@@ -20,13 +20,15 @@ src = ["src/backend/InvenTree"]
 "__init__.py" = ["D104"]

 [tool.ruff.lint]
-select = ["A", "B", "C4", "D", "I", "N", "F"]
+select = ["A", "B", "C", "C4", "D", "F", "I", "N", "PIE", "UP", "W"]
 # Things that should be enabled in the future:
 # - LOG
 # - DJ # for Django stuff
 # - S # for security stuff (bandit)

 ignore = [
+    "C901",
+    # - C901 - function is too complex
     "N999",
     # - N802 - function name should be lowercase
     "N802",
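Note (orientation only, not part of the commit): per the Ruff documentation, the rule families added to `select` include C4 (flake8-comprehensions), PIE (flake8-pie), UP (pyupgrade) and W (pycodestyle warnings). The UP family drives the mechanical rewrites seen throughout the rest of this diff. A minimal sketch, using hypothetical file and variable names, of the kind of code UP flags and how it ends up looking:

# A minimal illustrative example (assumed names, not code from the commit).
# UP015 flags the redundant 'r' open mode, UP032 flags str.format() calls
# that can be f-strings.
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as tmp:
    tmp.write('hello')
    path = tmp.name

# Before (flagged by the new rules):
with open(path, 'r') as f:
    print('read {n} characters'.format(n=len(f.read())))

# After (the style this commit applies across the code base):
with open(path) as f:
    print(f'read {len(f.read())} characters')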
@@ -321,7 +321,6 @@ class BulkDeleteMixin:
         Raises:
             ValidationError: If the deletion should not proceed
         """
-        pass

     def filter_delete_queryset(self, queryset, request):
         """Provide custom filtering for the queryset *before* it is deleted.
@@ -398,8 +397,6 @@ class BulkDeleteMixin:
 class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
     """Custom API endpoint which provides BulkDelete functionality in addition to List and Create."""

-    ...
-

 class APISearchViewSerializer(serializers.Serializer):
     """Serializer for the APISearchView."""

@@ -125,7 +125,7 @@ class InvenTreeConfig(AppConfig):
         for task in tasks:
             ref_name = f'{task.func.__module__}.{task.func.__name__}'

-            if ref_name in existing_tasks.keys():
+            if ref_name in existing_tasks:
                 # This task already exists - update the details if required
                 existing_task = existing_tasks[ref_name]
@@ -131,7 +131,7 @@ def load_config_data(set_cache: bool = False) -> map:

     cfg_file = get_config_file()

-    with open(cfg_file, 'r') as cfg:
+    with open(cfg_file) as cfg:
         data = yaml.safe_load(cfg)

     # Set the cache if requested

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """Provides extra global data to all templates."""

 import InvenTree.email
@@ -95,7 +95,7 @@ def from_engineering_notation(value):
     """
     value = str(value).strip()

-    pattern = '(\d+)([a-zA-Z]+)(\d+)(.*)'
+    pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'

     if match := re.match(pattern, value):
         left, prefix, right, suffix = match.groups()
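A brief aside on the raw-string prefix added above (an illustrative sketch, not code from the commit): in a plain string literal `\d` is an unrecognised escape sequence that recent Python versions warn about (pycodestyle/Ruff rule W605), while a raw string makes it explicit that the backslash is intended for the regex engine.

import re

# r'\d' keeps the backslash literal for the regex engine; '\d' in a normal
# string literal triggers an "invalid escape sequence" warning on newer Python.
pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'

# Engineering notation such as '4k7' (meaning 4.7k) splits into its parts:
print(re.match(pattern, '4k7').groups())  # ('4', 'k', '7', '')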
@@ -1,7 +1,6 @@
 """Custom exception handling for the DRF API."""

 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals

 import logging
 import sys
@@ -107,7 +107,7 @@ def construct_format_regex(fmt_string: str) -> str:
         if name:
             # Check if integer values are required
             if _fmt.endswith('d'):
-                c = '\d'
+                c = r'\d'
             else:
                 c = '.'

@@ -199,7 +199,7 @@ def format_money(
     Raises:
         ValueError: format string is incorrectly specified
     """
-    language = None and translation.get_language() or settings.LANGUAGE_CODE
+    language = (None and translation.get_language()) or settings.LANGUAGE_CODE
     locale = Locale.parse(translation.to_locale(language))
     if format:
         pattern = parse_pattern(format)
@@ -266,9 +266,8 @@ class RegistratonMixin:
                 raise forms.ValidationError(
                     _('The provided primary email address is not valid.')
                 )
-            else:
-                if split_email[1] == option[1:]:
-                    return super().clean_email(email)
+            elif split_email[1] == option[1:]:
+                return super().clean_email(email)

         logger.info('The provided email domain for %s is not approved', email)
         raise forms.ValidationError(_('The provided email domain is not approved.'))
@@ -12,12 +12,12 @@ from pathlib import Path
 from typing import TypeVar, Union
 from wsgiref.util import FileWrapper

-import django.utils.timezone as timezone
 from django.conf import settings
 from django.contrib.staticfiles.storage import StaticFilesStorage
 from django.core.exceptions import FieldError, ValidationError
 from django.core.files.storage import Storage, default_storage
 from django.http import StreamingHttpResponse
+from django.utils import timezone
 from django.utils.translation import gettext_lazy as _

 import pytz
@@ -800,10 +800,10 @@ def remove_non_printable_characters(
     if remove_unicode:
         # Remove Unicode control characters
         if remove_newline:
-            cleaned = regex.sub('[^\P{C}]+', '', cleaned)
+            cleaned = regex.sub(r'[^\P{C}]+', '', cleaned)
         else:
             # Use 'negative-lookahead' to exclude newline character
-            cleaned = regex.sub('(?![\x0a])[^\P{C}]+', '', cleaned)
+            cleaned = regex.sub('(?![\x0a])[^\\P{C}]+', '', cleaned)

     return cleaned
@@ -130,7 +130,7 @@ def download_image_from_url(remote_url, timeout=2.5):
         # Throw an error if anything goes wrong
         response.raise_for_status()
     except requests.exceptions.ConnectionError as exc:
-        raise Exception(_('Connection error') + f': {str(exc)}')
+        raise Exception(_('Connection error') + f': {exc!s}')
     except requests.exceptions.Timeout as exc:
         raise exc
     except requests.exceptions.HTTPError:
@@ -138,7 +138,7 @@ def download_image_from_url(remote_url, timeout=2.5):
             _('Server responded with invalid status code') + f': {response.status_code}'
         )
     except Exception as exc:
-        raise Exception(_('Exception occurred') + f': {str(exc)}')
+        raise Exception(_('Exception occurred') + f': {exc!s}')

     if response.status_code != 200:
         raise Exception(
@@ -110,7 +110,7 @@ class Command(BaseCommand):

         # Import icon map file
         if kwargs['input_file']:
-            with open(kwargs['input_file'], 'r') as f:
+            with open(kwargs['input_file']) as f:
                 icons = json.load(f)

             self.stdout.write(f'Loaded icon map for {len(icons.keys())} icons')

@@ -35,7 +35,7 @@ class Command(BaseCommand):
             img_paths.append(x.path)

         if len(img_paths) > 0:
-            if all((os.path.exists(path) for path in img_paths)):
+            if all(os.path.exists(path) for path in img_paths):
                 # All images exist - skip further work
                 return

@@ -35,4 +35,4 @@ class Command(BaseCommand):
         mfa_user[0].staticdevice_set.all().delete()
         # TOTP tokens
         mfa_user[0].totpdevice_set.all().delete()
-        print(f'Removed all MFA methods for user {str(mfa_user[0])}')
+        print(f'Removed all MFA methods for user {mfa_user[0]!s}')
@@ -36,7 +36,7 @@ def get_token_from_request(request):
     return None


-class AuthRequiredMiddleware(object):
+class AuthRequiredMiddleware:
     """Check for user to be authenticated."""

     def __init__(self, get_response):
@@ -92,23 +92,18 @@ class AuthRequiredMiddleware(object):

         # Allow static files to be accessed without auth
         # Important for e.g. login page
-        if request.path_info.startswith('/static/'):
-            authorized = True
-
-        # Unauthorized users can access the login page
-        elif request.path_info.startswith('/accounts/'):
-            authorized = True
-
-        elif (
-            request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
-            or request.path_info.startswith('/assets/')
-            or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
+        if (
+            request.path_info.startswith('/static/')
+            or request.path_info.startswith('/accounts/')
+            or (
+                request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
+                or request.path_info.startswith('/assets/')
+                or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
+            )
+            or self.check_token(request)
         ):
             authorized = True

-        elif self.check_token(request):
-            authorized = True
-
         # No authorization was found for the request
         if not authorized:
             path = request.path_info
@@ -128,14 +128,10 @@ class CreateAPI(CleanMixin, generics.CreateAPIView):
 class RetrieveAPI(generics.RetrieveAPIView):
     """View for retrieve API."""

-    pass
-

 class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
     """View for retrieve and update API."""

-    pass
-

 class CustomDestroyModelMixin:
     """This mixin was created pass the kwargs from the API to the models."""

@@ -1099,4 +1099,4 @@ def after_error_logged(sender, instance: Error, created: bool, **kwargs):

     except Exception as exc:
         """We do not want to throw an exception while reporting an exception"""
-        logger.error(exc)  # noqa: LOG005
+        logger.error(exc)
@@ -105,8 +105,7 @@ class IsStaffOrReadOnly(permissions.IsAdminUser):
     def has_permission(self, request, view):
         """Check if the user is a superuser."""
         return bool(
-            request.user
-            and request.user.is_staff
+            (request.user and request.user.is_staff)
             or request.method in permissions.SAFE_METHODS
         )
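A small aside on the grouping introduced above (an illustrative sketch, not code from the commit): `and` binds more tightly than `or` in Python, so the added parentheses only make the existing evaluation order explicit and do not change the result.

# 'and' has higher precedence than 'or', so both spellings evaluate identically.
for user_ok, is_staff, safe_method in [(True, False, True), (True, True, False)]:
    implicit = user_ok and is_staff or safe_method
    explicit = (user_ok and is_staff) or safe_method
    assert implicit == explicit
print('equivalent')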
@@ -11,43 +11,37 @@ def isInTestMode():

 def isImportingData():
     """Returns True if the database is currently importing (or exporting) data, e.g. 'loaddata' command is performed."""
-    return any((x in sys.argv for x in ['flush', 'loaddata', 'dumpdata']))
+    return any(x in sys.argv for x in ['flush', 'loaddata', 'dumpdata'])


 def isRunningMigrations():
     """Return True if the database is currently running migrations."""
     return any(
-        (
-            x in sys.argv
-            for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
-        )
+        x in sys.argv
+        for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
     )


 def isRebuildingData():
     """Return true if any of the rebuilding commands are being executed."""
     return any(
-        (
-            x in sys.argv
-            for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
-        )
+        x in sys.argv
+        for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
     )


 def isRunningBackup():
     """Return true if any of the backup commands are being executed."""
     return any(
-        (
-            x in sys.argv
-            for x in [
-                'backup',
-                'restore',
-                'dbbackup',
-                'dbresotore',
-                'mediabackup',
-                'mediarestore',
-            ]
-        )
+        x in sys.argv
+        for x in [
+            'backup',
+            'restore',
+            'dbbackup',
+            'dbresotore',
+            'mediabackup',
+            'mediarestore',
+        ]
     )

@@ -390,8 +390,6 @@ class InvenTreeTaggitSerializer(TaggitSerializer):
 class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
     """Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer."""

-    pass
-

 class UserSerializer(InvenTreeModelSerializer):
     """Serializer for a User."""
@@ -704,7 +702,6 @@ class DataFileUploadSerializer(serializers.Serializer):

     def save(self):
         """Empty overwrite for save."""
-        ...


 class DataFileExtractSerializer(serializers.Serializer):
@@ -824,7 +821,6 @@ class DataFileExtractSerializer(serializers.Serializer):

     def save(self):
         """No "save" action for this serializer."""
-        pass


 class NotesFieldMixin:
@@ -281,7 +281,7 @@ QUERYCOUNT = {
         'MIN_TIME_TO_LOG': 0.1,
         'MIN_QUERY_COUNT_TO_LOG': 25,
     },
-    'IGNORE_REQUEST_PATTERNS': ['^(?!\/(api)?(plugin)?\/).*'],
+    'IGNORE_REQUEST_PATTERNS': [r'^(?!\/(api)?(plugin)?\/).*'],
     'IGNORE_SQL_PATTERNS': [],
     'DISPLAY_DUPLICATES': 1,
     'RESPONSE_HEADER': 'X-Django-Query-Count',
@@ -298,7 +298,7 @@ if (
     and INVENTREE_ADMIN_ENABLED
     and not TESTING
     and get_boolean_setting('INVENTREE_DEBUG_SHELL', 'debug_shell', False)
-):  # noqa
+):
     try:
         import django_admin_shell  # noqa: F401

@@ -1065,7 +1065,7 @@ COOKIE_MODE = (

 valid_cookie_modes = {'lax': 'Lax', 'strict': 'Strict', 'none': None, 'null': None}

-if COOKIE_MODE not in valid_cookie_modes.keys():
+if COOKIE_MODE not in valid_cookie_modes:
     logger.error('Invalid cookie samesite mode: %s', COOKIE_MODE)
     sys.exit(-1)
@@ -94,20 +94,19 @@ for name, provider in providers.registry.provider_map.items():
     urls = []
     if len(adapters) == 1:
         urls = handle_oauth2(adapter=adapters[0])
+    elif provider.id in legacy:
+        logger.warning(
+            '`%s` is not supported on platform UI. Use `%s` instead.',
+            provider.id,
+            legacy[provider.id],
+        )
+        continue
     else:
-        if provider.id in legacy:
-            logger.warning(
-                '`%s` is not supported on platform UI. Use `%s` instead.',
-                provider.id,
-                legacy[provider.id],
-            )
-            continue
-        else:
-            logger.error(
-                'Found handler that is not yet ready for platform UI: `%s`. Open an feature request on GitHub if you need it implemented.',
-                provider.id,
-            )
-            continue
+        logger.error(
+            'Found handler that is not yet ready for platform UI: `%s`. Open an feature request on GitHub if you need it implemented.',
+            provider.id,
+        )
+        continue
     provider_urlpatterns += [path(f'{provider.id}/', include(urls))]

@@ -1,5 +1,4 @@
 """Provides system status functionality checks."""
-# -*- coding: utf-8 -*-

 import logging
 from datetime import timedelta
@@ -206,7 +206,7 @@ def offload_task(
             raise_warning(f"WARNING: '{taskname}' not offloaded - Function not found")
             return False
         except Exception as exc:
-            raise_warning(f"WARNING: '{taskname}' not offloaded due to {str(exc)}")
+            raise_warning(f"WARNING: '{taskname}' not offloaded due to {exc!s}")
             log_error('InvenTree.offload_task')
             return False
         else:
@@ -256,7 +256,7 @@ def offload_task(
             _func(*args, **kwargs)
         except Exception as exc:
             log_error('InvenTree.offload_task')
-            raise_warning(f"WARNING: '{taskname}' failed due to {str(exc)}")
+            raise_warning(f"WARNING: '{taskname}' failed due to {exc!s}")
             raise exc

     # Finally, task either completed successfully or was offloaded
@@ -80,7 +80,7 @@ def do_translate(parser, token):
     """
     bits = token.split_contents()
     if len(bits) < 2:
-        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
+        raise TemplateSyntaxError(f"'{bits[0]}' takes at least one argument")
     message_string = parser.compile_filter(bits[1])
     remaining = bits[2:]

@@ -95,7 +95,7 @@ def do_translate(parser, token):
         option = remaining.pop(0)
         if option in seen:
             raise TemplateSyntaxError(
-                "The '%s' option was specified more than once." % option
+                f"The '{option}' option was specified more than once."
             )
         elif option == 'noop':
             noop = True
@@ -104,13 +104,12 @@ def do_translate(parser, token):
                 value = remaining.pop(0)
             except IndexError:
                 raise TemplateSyntaxError(
-                    "No argument provided to the '%s' tag for the context option."
-                    % bits[0]
+                    f"No argument provided to the '{bits[0]}' tag for the context option."
                 )
             if value in invalid_context:
                 raise TemplateSyntaxError(
-                    "Invalid argument '%s' provided to the '%s' tag for the context "
-                    'option' % (value, bits[0])
+                    f"Invalid argument '{value}' provided to the '{bits[0]}' tag for the context "
+                    'option'
                 )
             message_context = parser.compile_filter(value)
         elif option == 'as':
@@ -118,16 +117,15 @@ def do_translate(parser, token):
                 value = remaining.pop(0)
             except IndexError:
                 raise TemplateSyntaxError(
-                    "No argument provided to the '%s' tag for the as option." % bits[0]
+                    f"No argument provided to the '{bits[0]}' tag for the as option."
                 )
             asvar = value
         elif option == 'escape':
             escape = True
         else:
             raise TemplateSyntaxError(
-                "Unknown argument for '%s' tag: '%s'. The only options "
+                f"Unknown argument for '{bits[0]}' tag: '{option}'. The only options "
                 "available are 'noop', 'context' \"xxx\", and 'as VAR'."
-                % (bits[0], option)
             )
         seen.add(option)

@@ -498,7 +498,7 @@ def primitive_to_javascript(primitive):
     elif type(primitive) in [int, float]:
         return primitive
     # Wrap with quotes
-    return format_html("'{}'", primitive)  # noqa: P103
+    return format_html("'{}'", primitive)


 @register.simple_tag()
@@ -70,11 +70,11 @@ class APITests(InvenTreeAPITestCase):
         """Helper function to use basic auth."""
         # Use basic authentication

-        authstring = bytes('{u}:{p}'.format(u=self.username, p=self.password), 'ascii')
+        authstring = bytes(f'{self.username}:{self.password}', 'ascii')

         # Use "basic" auth by default
         auth = b64encode(authstring).decode('ascii')
-        self.client.credentials(HTTP_AUTHORIZATION='Basic {auth}'.format(auth=auth))
+        self.client.credentials(HTTP_AUTHORIZATION=f'Basic {auth}')

     def tokenAuth(self):
         """Helper function to use token auth."""

@@ -70,7 +70,7 @@ class URLTest(TestCase):

         pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'

-        with open(input_file, 'r') as f:
+        with open(input_file) as f:
             data = f.read()

         results = re.findall(pattern, data)
@@ -543,22 +543,22 @@ class FormatTest(TestCase):
     def test_currency_formatting(self):
         """Test that currency formatting works correctly for multiple currencies."""
         test_data = (
-            (Money(3651.285718, 'USD'), 4, True, '$3,651.2857'),  # noqa: E201,E202
-            (Money(487587.849178, 'CAD'), 5, True, 'CA$487,587.84918'),  # noqa: E201,E202
-            (Money(0.348102, 'EUR'), 1, False, '0.3'),  # noqa: E201,E202
-            (Money(0.916530, 'GBP'), 1, True, '£0.9'),  # noqa: E201,E202
-            (Money(61.031024, 'JPY'), 3, False, '61.031'),  # noqa: E201,E202
-            (Money(49609.694602, 'JPY'), 1, True, '¥49,609.7'),  # noqa: E201,E202
-            (Money(155565.264777, 'AUD'), 2, False, '155,565.26'),  # noqa: E201,E202
-            (Money(0.820437, 'CNY'), 4, True, 'CN¥0.8204'),  # noqa: E201,E202
-            (Money(7587.849178, 'EUR'), 0, True, '€7,588'),  # noqa: E201,E202
-            (Money(0.348102, 'GBP'), 3, False, '0.348'),  # noqa: E201,E202
-            (Money(0.652923, 'CHF'), 0, True, 'CHF1'),  # noqa: E201,E202
-            (Money(0.820437, 'CNY'), 1, True, 'CN¥0.8'),  # noqa: E201,E202
-            (Money(98789.5295680, 'CHF'), 0, False, '98,790'),  # noqa: E201,E202
-            (Money(0.585787, 'USD'), 1, True, '$0.6'),  # noqa: E201,E202
-            (Money(0.690541, 'CAD'), 3, True, 'CA$0.691'),  # noqa: E201,E202
-            (Money(427.814104, 'AUD'), 5, True, 'A$427.81410'),  # noqa: E201,E202
+            (Money(3651.285718, 'USD'), 4, True, '$3,651.2857'),
+            (Money(487587.849178, 'CAD'), 5, True, 'CA$487,587.84918'),
+            (Money(0.348102, 'EUR'), 1, False, '0.3'),
+            (Money(0.916530, 'GBP'), 1, True, '£0.9'),
+            (Money(61.031024, 'JPY'), 3, False, '61.031'),
+            (Money(49609.694602, 'JPY'), 1, True, '¥49,609.7'),
+            (Money(155565.264777, 'AUD'), 2, False, '155,565.26'),
+            (Money(0.820437, 'CNY'), 4, True, 'CN¥0.8204'),
+            (Money(7587.849178, 'EUR'), 0, True, '€7,588'),
+            (Money(0.348102, 'GBP'), 3, False, '0.348'),
+            (Money(0.652923, 'CHF'), 0, True, 'CHF1'),
+            (Money(0.820437, 'CNY'), 1, True, 'CN¥0.8'),
+            (Money(98789.5295680, 'CHF'), 0, False, '98,790'),
+            (Money(0.585787, 'USD'), 1, True, '$0.6'),
+            (Money(0.690541, 'CAD'), 3, True, 'CA$0.691'),
+            (Money(427.814104, 'AUD'), 5, True, 'A$427.81410'),
         )

         with self.settings(LANGUAGE_CODE='en-us'):
@@ -794,7 +794,7 @@ class TestDownloadFile(TestCase):
     def test_download(self):
         """Tests for DownloadFile."""
         helpers.DownloadFile('hello world', 'out.txt')
-        helpers.DownloadFile(bytes(b'hello world'), 'out.bin')
+        helpers.DownloadFile(b'hello world', 'out.bin')


 class TestMPTT(TestCase):
@@ -1557,15 +1557,12 @@ class ClassValidationMixinTest(TestCase):

         def test(self):
             """Test function."""
-            ...

         def test1(self):
             """Test function."""
-            ...

         def test2(self):
             """Test function."""
-            ...

         required_attributes = ['NAME']
         required_overrides = [test, [test1, test2]]
@@ -1573,8 +1570,6 @@ class ClassValidationMixinTest(TestCase):
     class InvalidClass:
         """An invalid class that does not inherit from ClassValidationMixin."""

-        pass
-
     def test_valid_class(self):
         """Test that a valid class passes the validation."""

@@ -1585,11 +1580,9 @@ class ClassValidationMixinTest(TestCase):

         def test(self):
             """Test function."""
-            ...

         def test2(self):
             """Test function."""
-            ...

         TestClass.validate()

@@ -1612,7 +1605,6 @@ class ClassValidationMixinTest(TestCase):

         def test2(self):
             """Test function."""
-            ...

         with self.assertRaisesRegex(
             NotImplementedError,
@@ -1627,8 +1619,6 @@ class ClassProviderMixinTest(TestCase):
     class TestClass(ClassProviderMixin):
         """This class is a dummy class to test the ClassProviderMixin."""

-        pass
-
     def test_get_provider_file(self):
         """Test the get_provider_file function."""
         self.assertEqual(self.TestClass.get_provider_file(), __file__)
@@ -15,7 +15,7 @@ def reload_translation_stats():
     STATS_FILE = settings.BASE_DIR.joinpath('InvenTree/locale_stats.json').absolute()

     try:
-        with open(STATS_FILE, 'r') as f:
+        with open(STATS_FILE) as f:
             _translation_stats = json.load(f)
     except Exception:
         _translation_stats = None
@@ -246,8 +246,6 @@ class ExchangeRateMixin:
 class InvenTreeTestCase(ExchangeRateMixin, UserMixin, TestCase):
     """Testcase with user setup build in."""

-    pass
-

 class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
     """Base class for running InvenTree API tests."""
@@ -283,9 +281,7 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
             )  # pragma: no cover

             if verbose or n >= value:
-                msg = '\r\n%s' % json.dumps(
-                    context.captured_queries, indent=4
-                )  # pragma: no cover
+                msg = f'\r\n{json.dumps(context.captured_queries, indent=4)}'  # pragma: no cover
             else:
                 msg = None
@@ -456,7 +456,7 @@ urlpatterns = []
 if settings.INVENTREE_ADMIN_ENABLED:
     admin_url = settings.INVENTREE_ADMIN_URL

-    if settings.ADMIN_SHELL_ENABLE:  # noqa
+    if settings.ADMIN_SHELL_ENABLE:
         urlpatterns += [path(f'{admin_url}/shell/', include('django_admin_shell.urls'))]

     urlpatterns += [

@@ -96,7 +96,6 @@ def validate_sales_order_reference(value):

 def validate_tree_name(value):
     """Placeholder for legacy function used in migrations."""
-    ...


 def validate_overage(value):
@@ -180,7 +180,7 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
             AjaxUpdateView: 'change',
         }

-        for view_class in permission_map.keys():
+        for view_class in permission_map:
             if issubclass(type(self), view_class):
                 return permission_map[view_class]

@@ -238,7 +238,6 @@ class AjaxMixin(InvenTreeRoleMixin):
         Ref: https://docs.djangoproject.com/en/dev/topics/forms/
         """
         # Do nothing by default
-        pass

     def renderJsonResponse(self, request, form=None, data=None, context=None):
         """Render a JSON response based on specific class context.
@@ -578,14 +577,10 @@ class UserSessionOverride:
 class CustomSessionDeleteView(UserSessionOverride, SessionDeleteView):
     """Revert to settings after session delete."""

-    pass
-

 class CustomSessionDeleteOtherView(UserSessionOverride, SessionDeleteOtherView):
     """Revert to settings after session delete."""

-    pass
-

 class CustomLoginView(LoginView):
     """Custom login view that allows login with urlargs."""
@@ -47,7 +47,7 @@ from plugin.models import NotificationUserSetting
 from plugin.serializers import NotificationUserSettingSerializer


-class CsrfExemptMixin(object):
+class CsrfExemptMixin:
     """Exempts the view from CSRF requirements."""

     @method_decorator(csrf_exempt)
@@ -55,7 +55,7 @@ def get_icon_packs():
     tabler_icons_path = Path(__file__).parent.parent.joinpath(
         'InvenTree/static/tabler-icons/icons.json'
     )
-    with open(tabler_icons_path, 'r') as tabler_icons_file:
+    with open(tabler_icons_path) as tabler_icons_file:
         tabler_icons = json.load(tabler_icons_file)

     icon_packs = [
@@ -246,7 +246,7 @@ class BaseInvenTreeSetting(models.Model):

         If a particular setting is not present, create it with the default value
         """
-        cache_key = f'BUILD_DEFAULT_VALUES:{str(cls.__name__)}'
+        cache_key = f'BUILD_DEFAULT_VALUES:{cls.__name__!s}'

         try:
             if InvenTree.helpers.str2bool(cache.get(cache_key, False)):
@@ -329,7 +329,7 @@ class BaseInvenTreeSetting(models.Model):
         - The unique KEY string
         - Any key:value kwargs associated with the particular setting type (e.g. user-id)
         """
-        key = f'{str(cls.__name__)}:{setting_key}'
+        key = f'{cls.__name__!s}:{setting_key}'

         for k, v in kwargs.items():
             key += f'_{k}:{v}'

@@ -365,7 +365,7 @@ def trigger_notification(obj, category=None, obj_ref='pk', **kwargs):
     obj_ref_value = getattr(obj, 'id', None)
     if not obj_ref_value:
         raise KeyError(
-            f"Could not resolve an object reference for '{str(obj)}' with {obj_ref}, pk, id"
+            f"Could not resolve an object reference for '{obj!s}' with {obj_ref}, pk, id"
         )

     # Check if we have notified recently...
@@ -432,9 +432,9 @@ def trigger_notification(obj, category=None, obj_ref='pk', **kwargs):
             deliver_notification(method, obj, category, target_users, context)
         except NotImplementedError as error:
             # Allow any single notification method to fail, without failing the others
-            logger.error(error)  # noqa: LOG005
+            logger.error(error)
         except Exception as error:
-            logger.error(error)  # noqa: LOG005
+            logger.error(error)

     # Set delivery flag
     common.models.NotificationEntry.notify(category, obj_ref_value)
@@ -831,11 +831,9 @@ class PluginSettingsApiTest(PluginMixin, InvenTreeAPITestCase):

     def test_invalid_setting_key(self):
         """Test that an invalid setting key returns a 404."""
-        ...

     def test_uninitialized_setting(self):
         """Test that requesting an uninitialized setting creates the setting."""
-        ...


 class ErrorReportTest(InvenTreeAPITestCase):
@@ -933,7 +931,7 @@ class WebhookMessageTests(TestCase):
     def test_bad_token(self):
         """Test that a wrong token is not working."""
         response = self.client.post(
-            self.url, content_type=CONTENT_TYPE_JSON, **{'HTTP_TOKEN': '1234567fghj'}
+            self.url, content_type=CONTENT_TYPE_JSON, HTTP_TOKEN='1234567fghj'
         )

         assert response.status_code == HTTPStatus.FORBIDDEN
@@ -956,7 +954,7 @@ class WebhookMessageTests(TestCase):
             self.url,
             data="{'this': 123}",
             content_type=CONTENT_TYPE_JSON,
-            **{'HTTP_TOKEN': str(self.endpoint_def.token)},
+            HTTP_TOKEN=str(self.endpoint_def.token),
         )

         assert response.status_code == HTTPStatus.NOT_ACCEPTABLE
@@ -1004,7 +1002,7 @@ class WebhookMessageTests(TestCase):
         response = self.client.post(
             self.url,
             content_type=CONTENT_TYPE_JSON,
-            **{'HTTP_TOKEN': str('68MXtc/OiXdA5e2Nq9hATEVrZFpLb3Zb0oau7n8s31I=')},
+            HTTP_TOKEN='68MXtc/OiXdA5e2Nq9hATEVrZFpLb3Zb0oau7n8s31I=',
         )

         assert response.status_code == HTTPStatus.OK
@@ -1019,7 +1017,7 @@ class WebhookMessageTests(TestCase):
             self.url,
             data={'this': 'is a message'},
             content_type=CONTENT_TYPE_JSON,
-            **{'HTTP_TOKEN': str(self.endpoint_def.token)},
+            HTTP_TOKEN=str(self.endpoint_def.token),
        )

         assert response.status_code == HTTPStatus.OK
@@ -312,7 +312,6 @@ class FileManagementFormView(MultiStepFormView):
         This method is very specific to the type of data found in the file,
         therefore overwrite it in the subclass.
         """
-        pass

     def get_clean_items(self):
         """Returns dict with all cleaned values."""

@@ -10,4 +10,3 @@ class CompanyConfig(AppConfig):

     def ready(self):
         """This function is called whenever the Company app is loaded."""
-        pass

@@ -9,4 +9,4 @@ States can be extended with custom options for each InvenTree instance - those o
 from .states import StatusCode
 from .transition import StateTransitionMixin, TransitionMethod, storage

-__all__ = ['StatusCode', 'storage', 'TransitionMethod', 'StateTransitionMixin']
+__all__ = ['StateTransitionMixin', 'StatusCode', 'TransitionMethod', 'storage']
@@ -11,7 +11,7 @@ from .states import StatusCode
 @register.simple_tag
 def status_label(typ: str, key: int, *args, **kwargs):
     """Render a status label."""
-    state = {cls.tag(): cls for cls in inheritors(StatusCode)}.get(typ, None)
+    state = {cls.tag(): cls for cls in inheritors(StatusCode)}.get(typ)
     if state:
         return mark_safe(state.render(key, large=kwargs.get('large', False)))
     raise ValueError(f"Unknown status type '{typ}'")

@@ -23,7 +23,6 @@ class GeneralStatus(StatusCode):

     def GHI(self):  # This should be ignored
         """A invalid function."""
-        ...


 class GeneralStateTest(InvenTreeTestCase):

@@ -3,6 +3,6 @@
 from django import template

 register = template.Library()
-from generic.states.tags import status_label  # noqa: E402
+from generic.states.tags import status_label

 __all__ = [status_label]
@@ -213,8 +213,6 @@ class DataImportExportSerializerMixin(
 ):
     """Mixin class for adding data import/export functionality to a DRF serializer."""

-    pass
-

 class DataExportViewMixin:
     """Mixin class for exporting a dataset via the API.

@@ -215,7 +215,7 @@ class DataImportSession(models.Model):
                 continue

             # A default value exists
-            if field in field_defaults and field_defaults[field]:
+            if field_defaults.get(field):
                 continue

             # The field has been mapped to a data column

@@ -36,7 +36,7 @@ def load_data_file(data_file, file_format=None):

     try:
         data = file_object.read()
-    except (IOError, FileNotFoundError):
+    except (OSError, FileNotFoundError):
         raise ValidationError(_('Failed to open data file'))

     # Excel formats expect binary data

@@ -19,7 +19,7 @@ class ImporterTest(InvenTreeTestCase):

         fn = os.path.join(os.path.dirname(__file__), 'test_data', 'companies.csv')

-        with open(fn, 'r') as input_file:
+        with open(fn) as input_file:
             data = input_file.read()

         session = DataImportSession.objects.create(
@@ -61,4 +61,3 @@ class ImporterTest(InvenTreeTestCase):

     def test_field_defaults(self):
         """Test default field values."""
-        ...
@@ -1,4 +1,4 @@
 from machine.machine_type import BaseDriver, BaseMachineType, MachineStatus
 from machine.registry import registry

-__all__ = ['registry', 'BaseMachineType', 'BaseDriver', 'MachineStatus']
+__all__ = ['BaseDriver', 'BaseMachineType', 'MachineStatus', 'registry']

@@ -4,8 +4,8 @@ from machine.machine_types.label_printer import (
 )

 __all__ = [
-    # machine types
-    'LabelPrinterMachine',
     # base drivers
     'LabelPrinterBaseDriver',
+    # machine types
+    'LabelPrinterMachine',
 ]
@@ -84,13 +84,11 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
             machine_type,
             {
                 **machine_type,
-                **{
-                    'slug': 'label-printer',
-                    'name': 'Label Printer',
-                    'description': 'Directly print labels for various items.',
-                    'provider_plugin': None,
-                    'is_builtin': True,
-                },
+                'slug': 'label-printer',
+                'name': 'Label Printer',
+                'description': 'Directly print labels for various items.',
+                'provider_plugin': None,
+                'is_builtin': True,
             },
         )
         self.assertTrue(
@@ -109,15 +107,13 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
             driver,
             {
                 **driver,
-                **{
-                    'slug': 'test-label-printer-api',
-                    'name': 'Test label printer',
-                    'description': 'This is a test label printer driver for testing.',
-                    'provider_plugin': None,
-                    'is_builtin': True,
-                    'machine_type': 'label-printer',
-                    'driver_errors': [],
-                },
+                'slug': 'test-label-printer-api',
+                'name': 'Test label printer',
+                'description': 'This is a test label printer driver for testing.',
+                'provider_plugin': None,
+                'is_builtin': True,
+                'machine_type': 'label-printer',
+                'driver_errors': [],
             },
         )
         self.assertEqual(driver['provider_file'], __file__)
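Note on the dict literals above: entries written after `**mapping` in a dict display override the unpacked values, so wrapping the overrides in a nested `**{...}` adds nothing. A minimal sketch with stand-in values:

    base = {'slug': 'old', 'name': 'Printer'}

    assert {**base, **{'slug': 'label-printer'}} == {**base, 'slug': 'label-printer'} == {
        'slug': 'label-printer',
        'name': 'Printer',
    }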
@@ -173,17 +169,15 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
             response.data[0],
             {
                 **response.data[0],
-                **{
-                    'name': 'Test Machine',
-                    'machine_type': 'label-printer',
-                    'driver': 'test-label-printer-api',
-                    'initialized': True,
-                    'active': True,
-                    'status': 101,
-                    'status_model': 'LabelPrinterStatus',
-                    'status_text': '',
-                    'is_driver_available': True,
-                },
+                'name': 'Test Machine',
+                'machine_type': 'label-printer',
+                'driver': 'test-label-printer-api',
+                'initialized': True,
+                'active': True,
+                'status': 101,
+                'status_model': 'LabelPrinterStatus',
+                'status_text': '',
+                'is_driver_available': True,
             },
         )
 
@@ -216,9 +210,7 @@ class MachineAPITest(TestMachineRegistryMixin, InvenTreeAPITestCase):
             reverse('api-machine-detail', kwargs={'pk': pk}),
             {'name': 'Updated Machine'},
         )
-        self.assertEqual(
-            response.data, {**response.data, **{'name': 'Updated Machine'}}
-        )
+        self.assertEqual(response.data, {**response.data, 'name': 'Updated Machine'})
         self.assertEqual(MachineConfig.objects.get(pk=pk).name, 'Updated Machine')
 
         # Delete the machine
@@ -337,8 +337,6 @@ class PurchaseOrderList(PurchaseOrderMixin, DataExportViewMixin, ListCreateAPI):
 class PurchaseOrderDetail(PurchaseOrderMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a PurchaseOrder object."""
 
-    pass
-
 
 class PurchaseOrderContextMixin:
     """Mixin to add purchase order object as serializer context variable."""
@@ -603,8 +601,6 @@ class PurchaseOrderLineItemList(
 class PurchaseOrderLineItemDetail(PurchaseOrderLineItemMixin, RetrieveUpdateDestroyAPI):
     """Detail API endpoint for PurchaseOrderLineItem object."""
 
-    pass
-
 
 class PurchaseOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
     """API endpoint for accessing a list of PurchaseOrderExtraLine objects."""
@@ -746,8 +742,6 @@ class SalesOrderList(SalesOrderMixin, DataExportViewMixin, ListCreateAPI):
 class SalesOrderDetail(SalesOrderMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a SalesOrder object."""
 
-    pass
-
 
 class SalesOrderLineItemFilter(LineItemFilter):
     """Custom filters for SalesOrderLineItemList endpoint."""
@@ -865,8 +859,6 @@ class SalesOrderLineItemList(
 class SalesOrderLineItemDetail(SalesOrderLineItemMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a SalesOrderLineItem object."""
 
-    pass
-
 
 class SalesOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
     """API endpoint for accessing a list of SalesOrderExtraLine objects."""
@@ -1181,8 +1173,6 @@ class ReturnOrderList(ReturnOrderMixin, DataExportViewMixin, ListCreateAPI):
 class ReturnOrderDetail(ReturnOrderMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a single ReturnOrder object."""
 
-    pass
-
 
 class ReturnOrderContextMixin:
     """Simple mixin class to add a ReturnOrder to the serializer context."""
@@ -1310,8 +1300,6 @@ class ReturnOrderLineItemList(
 class ReturnOrderLineItemDetail(ReturnOrderLineItemMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a ReturnOrderLineItem object."""
 
-    pass
-
 
 class ReturnOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
     """API endpoint for accessing a list of ReturnOrderExtraLine objects."""
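Note on the removed `pass` statements above: a class whose body is only a docstring is already syntactically complete, so the trailing `pass` was dead code. Sketch:

    class Example:
        """A docstring alone is a valid class body."""

    assert Example.__doc__ is not None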
@@ -294,7 +294,7 @@ class SalesOrderExport(AjaxView):
 
         export_format = request.GET.get('format', 'csv')
 
-        filename = f'{str(order)} - {order.customer.name}.{export_format}'
+        filename = f'{order!s} - {order.customer.name}.{export_format}'
 
         dataset = SalesOrderLineItemResource().export(queryset=order.lines.all())
 
@@ -321,7 +321,7 @@ class PurchaseOrderExport(AjaxView):
 
         export_format = request.GET.get('format', 'csv')
 
-        filename = f'{str(order)} - {order.supplier.name}.{export_format}'
+        filename = f'{order!s} - {order.supplier.name}.{export_format}'
 
         dataset = PurchaseOrderLineItemResource().export(queryset=order.lines.all())
 
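Note on the `!s` conversion above: in an f-string, `!s` forces `str()` conversion, so `f'{order!s}'` renders the same text as `f'{str(order)}'`. Illustration with a hypothetical stand-in object (not the real model):

    class Order:
        def __str__(self):
            return 'PO-0001'

    order = Order()
    assert f'{order!s}' == f'{str(order)}' == 'PO-0001'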
@@ -466,8 +466,6 @@ class PartTestTemplateMixin:
 class PartTestTemplateDetail(PartTestTemplateMixin, RetrieveUpdateDestroyAPI):
     """Detail endpoint for PartTestTemplate model."""
 
-    pass
-
 
 class PartTestTemplateList(PartTestTemplateMixin, DataExportViewMixin, ListCreateAPI):
     """API endpoint for listing (and creating) a PartTestTemplate."""
@@ -1570,8 +1568,6 @@ class PartParameterTemplateList(
 class PartParameterTemplateDetail(PartParameterTemplateMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for accessing the detail view for a PartParameterTemplate object."""
 
-    pass
-
 
 class PartParameterAPIMixin:
     """Mixin class for PartParameter API endpoints."""
@@ -1663,8 +1659,6 @@ class PartParameterList(PartParameterAPIMixin, DataExportViewMixin, ListCreateAP
 class PartParameterDetail(PartParameterAPIMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a single PartParameter object."""
 
-    pass
-
 
 class PartStocktakeFilter(rest_filters.FilterSet):
     """Custom filter for the PartStocktakeList endpoint."""
@@ -1922,8 +1916,6 @@ class BomList(BomMixin, DataExportViewMixin, ListCreateDestroyAPIView):
 class BomDetail(BomMixin, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a single BomItem object."""
 
-    pass
-
 
 class BomImportUpload(CreateAPI):
     """API endpoint for uploading a complete Bill of Materials.
@@ -4328,7 +4328,7 @@ class BomItem(
         - allow_variants
         """
         # Seed the hash with the ID of this BOM item
-        result_hash = hashlib.md5(''.encode())
+        result_hash = hashlib.md5(b'')
 
         # The following components are used to calculate the checksum
         components = [
@@ -4422,8 +4422,7 @@ class BomItem(
         try:
             ovg = float(overage)
 
-            if ovg < 0:
-                ovg = 0
+            ovg = max(ovg, 0)
 
             return ovg
         except ValueError:
@@ -4435,10 +4434,8 @@ class BomItem(
 
         try:
             percent = float(overage) / 100.0
-            if percent > 1:
-                percent = 1
-            if percent < 0:
-                percent = 0
+            percent = min(percent, 1)
+            percent = max(percent, 0)
 
             # Must be represented as a decimal
             percent = Decimal(percent)
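Note on the clamping rewrites above: `min()` and `max()` express the same bounds checks as the removed `if` blocks, clamping from above and below respectively. Worked example (illustrative helper, not project code):

    def clamp_fraction(value):
        value = min(value, 1)
        value = max(value, 0)
        return value

    assert clamp_fraction(1.5) == 1
    assert clamp_fraction(-0.2) == 0
    assert clamp_fraction(0.25) == 0.25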
@@ -1979,9 +1979,8 @@ class BomImportExtractSerializer(InvenTree.serializers.DataFileExtractSerializer
 
         if part is None:
             row['errors']['part'] = _('No matching part found')
-        else:
-            if not part.component:
-                row['errors']['part'] = _('Part is not designated as a component')
+        elif not part.component:
+            row['errors']['part'] = _('Part is not designated as a component')
 
         # Update the 'part' value in the row
         row['part'] = part.pk if part is not None else None
@@ -49,7 +49,7 @@ class BomExportTest(InvenTreeTestCase):
         with open(filename, 'wb') as f:
             f.write(response.getvalue())
 
-        with open(filename, 'r') as f:
+        with open(filename) as f:
             reader = csv.reader(f, delimiter=',')
 
             for line in reader:
@@ -96,7 +96,7 @@ class BomExportTest(InvenTreeTestCase):
             f.write(response.getvalue())
 
         # Read the file
-        with open(filename, 'r') as f:
+        with open(filename) as f:
             reader = csv.reader(f, delimiter=',')
 
             for line in reader:
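Note on the `open()` calls above: the default mode is `'r'` (read, text), so the explicit `'r'` argument was redundant. For example:

    with open(__file__) as implicit, open(__file__, 'r') as explicit:
        assert implicit.mode == explicit.mode == 'r'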
@@ -419,7 +419,7 @@ class PartParameterTest(InvenTreeAPITestCase):
 
         response = self.get(
             url,
-            {'ordering': 'parameter_{pk}'.format(pk=template.pk), 'parameters': 'true'},
+            {'ordering': f'parameter_{template.pk}', 'parameters': 'true'},
             expected_code=200,
         )
 
@@ -436,10 +436,7 @@ class PartParameterTest(InvenTreeAPITestCase):
         # Next, check reverse ordering
         response = self.get(
             url,
-            {
-                'ordering': '-parameter_{pk}'.format(pk=template.pk),
-                'parameters': 'true',
-            },
+            {'ordering': f'-parameter_{template.pk}', 'parameters': 'true'},
             expected_code=200,
         )
 
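Note on the f-string conversions above: the embedded expression is evaluated directly, so the result matches the older `str.format()` call. Sketch with a stand-in value:

    pk = 42  # stand-in for template.pk
    assert f'parameter_{pk}' == 'parameter_{pk}'.format(pk=pk) == 'parameter_42'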
@@ -111,7 +111,6 @@ class PartPricingTests(InvenTreeTestCase):
 
     def test_invalid_rate(self):
         """Ensure that conversion behaves properly with missing rates."""
-        ...
 
     def test_simple(self):
         """Tests for hard-coded values."""
@@ -414,7 +414,7 @@ class PartDetailFromIPN(PartDetail):
         if not self.object:
             return HttpResponseRedirect(reverse('part-index'))
 
-        return super(PartDetailFromIPN, self).get(request, *args, **kwargs)
+        return super().get(request, *args, **kwargs)
 
 
 class PartImageSelect(AjaxUpdateView):
@@ -5,8 +5,8 @@ from .plugin import InvenTreePlugin
 from .registry import registry
 
 __all__ = [
-    'registry',
     'InvenTreePlugin',
-    'MixinNotImplementedError',
     'MixinImplementationError',
+    'MixinNotImplementedError',
+    'registry',
 ]
@@ -594,7 +594,7 @@ class BarcodeSOAllocate(BarcodeView):
             raise ValidationError(response)
 
         # If we have sufficient information, we can allocate the stock item
-        if all((x is not None for x in [line_item, sales_order, shipment, quantity])):
+        if all(x is not None for x in [line_item, sales_order, shipment, quantity]):
             order.models.SalesOrderAllocation.objects.create(
                 line=line_item, shipment=shipment, item=stock_item, quantity=quantity
             )
@@ -471,9 +471,9 @@ class SupplierBarcodeMixin(BarcodeMixin):
         # 2. check if it's defined on the part
         # 3. check if there's 1 or 0 stock locations defined in InvenTree
         #    -> assume all stock is going into that location (or no location)
-        if location := line_item.destination:
-            pass
-        elif location := supplier_part.part.get_default_location():
+        if (location := line_item.destination) or (
+            location := supplier_part.part.get_default_location()
+        ):
             pass
         elif StockLocation.objects.count() <= 1:
             if not (location := StockLocation.objects.first()):
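Note on the combined walrus expression above: `or` short-circuits, and an assignment expression binds its value even when used inside a larger expression, so `location` ends up holding the first truthy candidate, just like the old `if`/`elif` ladder. A minimal sketch with illustrative names:

    destination = None
    default_location = 'Main warehouse'

    if (location := destination) or (location := default_location):
        pass

    assert location == 'Main warehouse'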
@@ -33,7 +33,6 @@ class ReportMixin:
             request: The request object which initiated the report generation
             context: The context dictionary to add to
         """
-        pass
 
     def add_label_context(self, label_instance, model_instance, request, context):
         """Add extra context to the provided label instance.
@@ -46,7 +45,6 @@ class ReportMixin:
             request: The request object which initiated the label generation
             context: The context dictionary to add to
         """
-        pass
 
     def report_callback(self, template, instance, report, request):
         """Callback function called after a report is generated.
@@ -59,4 +57,3 @@ class ReportMixin:
 
         The default implementation does nothing.
         """
-        pass
@@ -15,8 +15,6 @@ else:
     class SettingsKeyType:
         """Dummy class, so that python throws no error."""
 
-        pass
-
 
 class SettingsMixin:
     """Mixin that enables global settings for the plugin."""
@@ -205,4 +205,3 @@ class ValidationMixin:
         Raises:
             ValidationError if the proposed parameter value is objectionable
         """
-        pass
@@ -251,8 +251,6 @@ class LabelPrintingMixin:
 
     def before_printing(self):
         """Hook method called before printing labels."""
-        pass
 
     def after_printing(self):
         """Hook method called after printing labels."""
-        pass
@@ -44,14 +44,10 @@ class MixinImplementationError(ValueError):
     Mostly raised if constant is missing
     """
 
-    pass
-
 
 class MixinNotImplementedError(NotImplementedError):
     """Error if necessary mixin function was not overwritten."""
 
-    pass
-
 
 def log_error(error, reference: str = 'general'):
     """Log an plugin error."""
@@ -220,15 +220,11 @@ def install_plugin(url=None, packagename=None, user=None, version=None):
             full_pkg = f'{packagename}@{url}'
         else:
             full_pkg = url
-    else:  # pragma: no cover
-        # using a custom package repositories
-        # This is only for pypa compliant directory services (all current are tested above)
-        # and not covered by tests.
-        if url:
-            install_name.append('-i')
-            full_pkg = url
-        elif packagename:
-            full_pkg = packagename
+    elif url:
+        install_name.append('-i')
+        full_pkg = url
+    elif packagename:
+        full_pkg = packagename
 
     elif packagename:
         # use pypi
@@ -1,3 +1,3 @@
 from machine import BaseDriver, BaseMachineType, MachineStatus, registry
 
-__all__ = ['registry', 'BaseDriver', 'BaseMachineType', 'MachineStatus']
+__all__ = ['BaseDriver', 'BaseMachineType', 'MachineStatus', 'registry']
@@ -1,3 +1,3 @@
 """just re-export the machine types from the plugin InvenTree app."""
 
-from machine.machine_types import *  # noqa: F403, F401
+from machine.machine_types import *  # noqa: F403
@@ -20,23 +20,23 @@ from plugin.base.locate.mixins import LocateMixin
 
 __all__ = [
     'APICallMixin',
+    'ActionMixin',
     'AppMixin',
+    'BarcodeMixin',
+    'BulkNotificationMethod',
     'CurrencyExchangeMixin',
     'EventMixin',
     'IconPackMixin',
     'LabelPrintingMixin',
+    'LocateMixin',
     'NavigationMixin',
+    'PanelMixin',
     'ReportMixin',
     'ScheduleMixin',
     'SettingsContentMixin',
     'SettingsMixin',
-    'UrlsMixin',
-    'PanelMixin',
-    'ActionMixin',
-    'BarcodeMixin',
-    'SupplierBarcodeMixin',
-    'LocateMixin',
-    'ValidationMixin',
     'SingleNotificationMethod',
-    'BulkNotificationMethod',
+    'SupplierBarcodeMixin',
+    'UrlsMixin',
+    'ValidationMixin',
 ]
@@ -85,8 +85,6 @@ class ScheduledTaskPluginTests(TestCase):
         class NoSchedules(Base):
             """Plugin without schedules."""
 
-            pass
-
 
         with self.assertRaises(MixinImplementationError):
             NoSchedules().register_tasks()
@@ -18,8 +18,9 @@ class SampleTransitionPlugin(InvenTreePlugin):
     def transition(current_state, target_state, instance, default_action, **kwargs):  # noqa: N805
         """Example override function for state transition."""
         # Only act on ReturnOrders that should be completed
-        if not isinstance(instance, ReturnOrder) or not (
-            target_state == ReturnOrderStatus.COMPLETE.value
+        if (
+            not isinstance(instance, ReturnOrder)
+            or target_state != ReturnOrderStatus.COMPLETE.value
         ):
             return False
 
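Note on the rewritten guard above: negating a parenthesised equality is the same test as using `!=` directly (`not (a == b)` is equivalent to `a != b` for ordinary comparisons), so the early `return False` fires in exactly the same cases. Sketch:

    a, b = 1, 2
    assert (not (a == b)) == (a != b)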
@@ -144,8 +144,6 @@ class PluginConfigInstallSerializer(serializers.Serializer):
 class PluginConfigEmptySerializer(serializers.Serializer):
     """Serializer for a PluginConfig."""
 
-    ...
-
 
 class PluginReloadSerializer(serializers.Serializer):
     """Serializer for remotely forcing plugin registry reload."""
@@ -195,8 +195,7 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
         mixin_dict = plg.mixins()
         self.assertIn('base', mixin_dict)
         self.assertEqual(
-            mixin_dict,
-            {**mixin_dict, **{'base': {'key': 'base', 'human_name': 'base'}}},
+            mixin_dict, {**mixin_dict, 'base': {'key': 'base', 'human_name': 'base'}}
         )
 
         # check reload on save
@@ -190,12 +190,12 @@ def uploaded_image(
     elif width is not None:
         # Resize the image, width only
         wpercent = width / float(img.size[0])
-        hsize = int((float(img.size[1]) * float(wpercent)))
+        hsize = int(float(img.size[1]) * float(wpercent))
         img = img.resize((width, hsize))
     elif height is not None:
         # Resize the image, height only
         hpercent = height / float(img.size[1])
-        wsize = int((float(img.size[0]) * float(hpercent)))
+        wsize = int(float(img.size[0]) * float(hpercent))
         img = img.resize((wsize, height))
 
     # Optionally rotate the image
@@ -7,7 +7,7 @@ import sys
 
 def calculate_coverage(filename):
     """Calculate translation coverage for a .po file."""
-    with open(filename, 'r') as f:
+    with open(filename) as f:
         lines = f.readlines()
 
     lines_count = 0
@@ -19,7 +19,7 @@ def calculate_coverage(filename):
             lines_count += 1
 
         elif line.startswith('msgstr'):
-            if line.startswith('msgstr ""') or line.startswith("msgstr ''"):
+            if line.startswith(('msgstr ""', "msgstr ''")):
                 lines_uncovered += 1
             else:
                 lines_covered += 1
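Note on the `startswith` change above: `str.startswith()` accepts a tuple of prefixes and returns True if any of them matches, so the chained `or` folds into one call:

    line = 'msgstr ""'
    assert line.startswith(('msgstr ""', "msgstr ''")) == (
        line.startswith('msgstr ""') or line.startswith("msgstr ''")
    )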
@@ -53,7 +53,7 @@ if __name__ == '__main__':
 
     percentages = []
 
-    for locale in locales.keys():
+    for locale in locales:
         locale_file = locales[locale]
         stats = calculate_coverage(locale_file)
 
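Note on the loop above: iterating a dict yields its keys, so `for locale in locales:` visits the same items as `for locale in locales.keys():`. For example:

    locales = {'de': 'de.po', 'fr': 'fr.po'}
    assert list(locales) == list(locales.keys()) == ['de', 'fr']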
@@ -45,7 +45,7 @@ if __name__ == '__main__':
 
     print('Generating icon list...')
     with open(
-        os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json'), 'r'
+        os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json')
     ) as f:
         icons = json.load(f)
 
@@ -974,23 +974,17 @@ class StockList(DataExportViewMixin, ListCreateDestroyAPIView):
                         'The supplier part has a pack size defined, but flag use_pack_size not set'
                     )
                 })
-            else:
-                if bool(data.get('use_pack_size')):
-                    quantity = data['quantity'] = supplier_part.base_quantity(
-                        quantity
-                    )
-
-                    # Divide purchase price by pack size, to save correct price per stock item
-                    if (
-                        data['purchase_price']
-                        and supplier_part.pack_quantity_native
-                    ):
-                        try:
-                            data['purchase_price'] = float(
-                                data['purchase_price']
-                            ) / float(supplier_part.pack_quantity_native)
-                        except ValueError:
-                            pass
+            elif bool(data.get('use_pack_size')):
+                quantity = data['quantity'] = supplier_part.base_quantity(quantity)
+
+                # Divide purchase price by pack size, to save correct price per stock item
+                if data['purchase_price'] and supplier_part.pack_quantity_native:
+                    try:
+                        data['purchase_price'] = float(
+                            data['purchase_price']
+                        ) / float(supplier_part.pack_quantity_native)
+                    except ValueError:
+                        pass
 
         # Now remove the flag from data, so that it doesn't interfere with saving
         # Do this regardless of results above
@@ -1241,8 +1235,6 @@ class StockItemTestResultMixin:
 class StockItemTestResultDetail(StockItemTestResultMixin, RetrieveUpdateDestroyAPI):
     """Detail endpoint for StockItemTestResult."""
 
-    pass
-
 
 class StockItemTestResultFilter(rest_filters.FilterSet):
     """API filter for the StockItemTestResult list."""
@@ -477,8 +477,7 @@ class StockItem(
 
             serial_int = abs(serial_int)
 
-            if serial_int > clip:
-                serial_int = clip
+            serial_int = min(serial_int, clip)
 
             self.serial_int = serial_int
             return
@@ -584,7 +583,7 @@ class StockItem(
         except (ValueError, StockItem.DoesNotExist):
             pass
 
-        super(StockItem, self).save(*args, **kwargs)
+        super().save(*args, **kwargs)
 
         # If user information is provided, and no existing note exists, create one!
        if user and self.tracking_info.count() == 0:
@@ -619,7 +618,7 @@ class StockItem(
         If the StockItem is serialized, the same serial number.
         cannot exist for the same part (or part tree).
         """
-        super(StockItem, self).validate_unique(exclude)
+        super().validate_unique(exclude)
 
         # If the serial number is set, make sure it is not a duplicate
         if self.serial:
@@ -2022,8 +2021,7 @@ class StockItem(
         except (InvalidOperation, ValueError):
             return
 
-        if quantity < 0:
-            quantity = 0
+        quantity = max(quantity, 0)
 
         self.quantity = quantity
 
@@ -2208,7 +2206,7 @@ class StockItem(
 
         for key in item_results.keys():
             # Results from sub items should not override master ones
-            if key not in result_map.keys():
+            if key not in result_map:
                 result_map[key] = item_results[key]
 
         return result_map
@@ -2586,6 +2584,4 @@ class StockItemTestResult(InvenTree.models.InvenTreeMetadataModel):
         help_text=_('The timestamp of the test finish'),
     )
 
-    user = models.ForeignKey(User, on_delete=models.SET_NULL, blank=True, null=True)
-
     date = models.DateTimeField(auto_now_add=True, editable=False)
@@ -421,7 +421,7 @@ class StockItemSerializer(
         tests = kwargs.pop('tests', False)
         path_detail = kwargs.pop('path_detail', False)
 
-        super(StockItemSerializer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
         if not part_detail:
             self.fields.pop('part_detail', None)
@@ -640,7 +640,7 @@ class StockItemListTest(StockAPITestCase):
             StockStatus.REJECTED.value: 0,
         }
 
-        for code in codes.keys():
+        for code in codes:
             num = codes[code]
 
             response = self.get_stock(status=code)
@@ -293,8 +293,6 @@ class InvenTreeUserAdmin(UserAdmin):
 class OwnerAdmin(admin.ModelAdmin):
     """Custom admin interface for the Owner model."""
 
-    pass
-
 
 admin.site.unregister(Group)
 admin.site.register(Group, RoleGroupAdmin)
@@ -557,10 +557,8 @@ def update_group_roles(group, debug=False):
                 permissions_to_add.add(permission_string)
 
-            else:
-                # A forbidden action will be ignored if we have already allowed it
-                if permission_string not in permissions_to_add:
-                    permissions_to_delete.add(permission_string)
+            elif permission_string not in permissions_to_add:
+                permissions_to_delete.add(permission_string)
 
     # Pre-fetch all the RuleSet objects
     rulesets = {

25 tasks.py
@@ -25,10 +25,9 @@ def checkPythonVersion():
     valid = True
 
-    if sys.version_info.major < REQ_MAJOR:
-        valid = False
-
-    elif sys.version_info.major == REQ_MAJOR and sys.version_info.minor < REQ_MINOR:
+    if sys.version_info.major < REQ_MAJOR or (
+        sys.version_info.major == REQ_MAJOR and sys.version_info.minor < REQ_MINOR
+    ):
         valid = False
 
     if not valid:
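Note on the merged version check above: an equivalent formulation (not used in this commit) compares `sys.version_info` against a tuple, which Python orders element by element:

    import sys

    REQ_MAJOR, REQ_MINOR = 3, 9  # illustrative thresholds
    combined = sys.version_info.major < REQ_MAJOR or (
        sys.version_info.major == REQ_MAJOR and sys.version_info.minor < REQ_MINOR
    )
    assert (sys.version_info < (REQ_MAJOR, REQ_MINOR)) == combined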
@@ -618,7 +617,7 @@ def export_records(
     print('Running data post-processing step...')
 
     # Post-process the file, to remove any "permissions" specified for a user or group
-    with open(tmpfile, 'r') as f_in:
+    with open(tmpfile) as f_in:
         data = json.loads(f_in.read())
 
     data_out = []
@@ -684,7 +683,7 @@ def import_records(
     # Pre-process the data, to remove any "permissions" specified for a user or group
     datafile = f'{target}.data.json'
 
-    with open(target, 'r') as f_in:
+    with open(target) as f_in:
         try:
             data = json.loads(f_in.read())
         except json.JSONDecodeError as exc:
@@ -836,7 +835,7 @@ def server(c, address='127.0.0.1:8000'):
 
     Note: This is *not* sufficient for a production installation.
     """
-    manage(c, 'runserver {address}'.format(address=address), pty=True)
+    manage(c, f'runserver {address}', pty=True)
 
 
 @task(pre=[wait])
@@ -880,16 +879,16 @@ def test_translations(c):
 
     # compile regex
     reg = re.compile(
-        r'[a-zA-Z0-9]{1}'  # match any single letter and number  # noqa: W504
-        + r'(?![^{\(\<]*[}\)\>])'  # that is not inside curly brackets, brackets or a tag  # noqa: W504
-        + r'(?<![^\%][^\(][)][a-z])'  # that is not a specially formatted variable with singles  # noqa: W504
+        r'[a-zA-Z0-9]{1}'  # match any single letter and number
+        + r'(?![^{\(\<]*[}\)\>])'  # that is not inside curly brackets, brackets or a tag
+        + r'(?<![^\%][^\(][)][a-z])'  # that is not a specially formatted variable with singles
         + r'(?![^\\][\n])'  # that is not a newline
     )
     last_string = ''
 
     # loop through input file lines
-    with open(file_path, 'rt') as file_org:
-        with open(new_file_path, 'wt') as file_new:
+    with open(file_path) as file_org:
+        with open(new_file_path, 'w') as file_new:
             for line in file_org:
                 if line.startswith('msgstr "'):
                     # write output -> replace regex matches with x in the read in (multi)string
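Note on the mode strings above: text mode is the default for `open()`, so `'rt'` behaves the same as `'r'` and `'wt'` the same as `'w'`; only the redundant `t` is dropped. Quick check:

    import io

    with open(__file__, 'rt') as a, open(__file__) as b:
        assert isinstance(a, io.TextIOWrapper) and isinstance(b, io.TextIOWrapper)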
@@ -1330,7 +1329,7 @@ def frontend_download(
     version_file = localDir().joinpath('VERSION')
     if not version_file.exists():
         return
-    from dotenv import dotenv_values  # noqa: WPS433
+    from dotenv import dotenv_values
 
     content = dotenv_values(version_file)
     if (