Mirror of https://github.com/inventree/InvenTree (synced 2024-08-30 18:33:04 +00:00)

Commit f83fedbbb8 (parent a92442e60e): switched to single quotes everywhere
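The change below is purely mechanical: Python string literals written with double quotes are rewritten to use single quotes, while strings that themselves contain a single quote keep (or switch to) double quotes so that no escaping is needed. The exact tooling is not shown in this diff, but the result matches a single-quote formatter rule (for example Ruff's quote-style = "single" option); the short sketch below is illustrative only, and its last line is a hypothetical example rather than a line taken from this commit.

- logger.info("Starting background tasks...")
+ logger.info('Starting background tasks...')
- forbidden = ["#", "@", "!"]
+ forbidden = ['#', '@', '!']
  message = "it's easy"  # unchanged: the literal contains a single quote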
@@ -32,12 +32,12 @@ class InvenTreeResource(ModelResource):
"""Override the default import_data_inner function to provide better error handling"""
if len(dataset) > self.MAX_IMPORT_ROWS:
raise ImportExportError(
- f"Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})"
+ f'Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})'
)

if len(dataset.headers) > self.MAX_IMPORT_COLS:
raise ImportExportError(
- f"Dataset contains too many columns (max {self.MAX_IMPORT_COLS})"
+ f'Dataset contains too many columns (max {self.MAX_IMPORT_COLS})'
)

return super().import_data_inner(
@@ -232,19 +232,19 @@ class BulkDeleteMixin:

if not items and not filters:
raise ValidationError({
- "non_field_errors": [
- "List of items or filters must be provided for bulk deletion"
+ 'non_field_errors': [
+ 'List of items or filters must be provided for bulk deletion'
]
})

if items and type(items) is not list:
raise ValidationError({
- "items": ["'items' must be supplied as a list object"]
+ 'items': ["'items' must be supplied as a list object"]
})

if filters and type(filters) is not dict:
raise ValidationError({
- "filters": ["'filters' must be supplied as a dict object"]
+ 'filters': ["'filters' must be supplied as a dict object"]
})

# Keep track of how many items we deleted
@@ -266,7 +266,7 @@ class BulkDeleteMixin:
n_deleted = queryset.count()
queryset.delete()

- return Response({'success': f"Deleted {n_deleted} items"}, status=204)
+ return Response({'success': f'Deleted {n_deleted} items'}, status=204)


class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
@@ -308,7 +308,7 @@ class APIDownloadMixin:

def download_queryset(self, queryset, export_format):
"""This function must be implemented to provide a downloadFile request."""
- raise NotImplementedError("download_queryset method not implemented!")
+ raise NotImplementedError('download_queryset method not implemented!')


class AttachmentMixin:
@@ -21,7 +21,7 @@ from InvenTree.ready import (
isPluginRegistryLoaded,
)

- logger = logging.getLogger("inventree")
+ logger = logging.getLogger('inventree')


class InvenTreeConfig(AppConfig):
@@ -82,11 +82,11 @@ class InvenTreeConfig(AppConfig):
try:
Schedule.objects.filter(func__in=obsolete).delete()
except Exception:
- logger.exception("Failed to remove obsolete tasks - database not ready")
+ logger.exception('Failed to remove obsolete tasks - database not ready')

def start_background_tasks(self):
"""Start all background tests for InvenTree."""
- logger.info("Starting background tasks...")
+ logger.info('Starting background tasks...')

from django_q.models import Schedule

@@ -130,17 +130,17 @@ class InvenTreeConfig(AppConfig):

if len(tasks_to_create) > 0:
Schedule.objects.bulk_create(tasks_to_create)
- logger.info("Created %s new scheduled tasks", len(tasks_to_create))
+ logger.info('Created %s new scheduled tasks', len(tasks_to_create))

if len(tasks_to_update) > 0:
Schedule.objects.bulk_update(tasks_to_update, ['schedule_type', 'minutes'])
- logger.info("Updated %s existing scheduled tasks", len(tasks_to_update))
+ logger.info('Updated %s existing scheduled tasks', len(tasks_to_update))

# Put at least one task onto the background worker stack,
# which will be processed as soon as the worker comes online
InvenTree.tasks.offload_task(InvenTree.tasks.heartbeat, force_async=True)

- logger.info("Started %s scheduled background tasks...", len(tasks))
+ logger.info('Started %s scheduled background tasks...', len(tasks))

def collect_tasks(self):
"""Collect all background tasks."""
@@ -152,7 +152,7 @@ class InvenTreeConfig(AppConfig):
try:
import_module(f'{app.module.__package__}.tasks')
except Exception as e: # pragma: no cover
- logger.exception("Error loading tasks for %s: %s", app_name, e)
+ logger.exception('Error loading tasks for %s: %s', app_name, e)

def update_exchange_rates(self): # pragma: no cover
"""Update exchange rates each time the server is started.
@@ -183,20 +183,20 @@ class InvenTreeConfig(AppConfig):

if last_update is None:
# Never been updated
- logger.info("Exchange backend has never been updated")
+ logger.info('Exchange backend has never been updated')
update = True

# Backend currency has changed?
if base_currency != backend.base_currency:
logger.info(
- "Base currency changed from %s to %s",
+ 'Base currency changed from %s to %s',
backend.base_currency,
base_currency,
)
update = True

except ExchangeBackend.DoesNotExist:
- logger.info("Exchange backend not found - updating")
+ logger.info('Exchange backend not found - updating')
update = True

except Exception:
@@ -207,9 +207,9 @@ class InvenTreeConfig(AppConfig):
try:
update_exchange_rates()
except OperationalError:
- logger.warning("Could not update exchange rates - database not ready")
+ logger.warning('Could not update exchange rates - database not ready')
except Exception as e:
- logger.exception("Error updating exchange rates: %s (%s)", e, type(e))
+ logger.exception('Error updating exchange rates: %s (%s)', e, type(e))

def add_user_on_startup(self):
"""Add a user on startup."""
@@ -222,7 +222,7 @@ class InvenTreeConfig(AppConfig):
add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
add_password_file = get_setting(
- "INVENTREE_ADMIN_PASSWORD_FILE", "admin_password_file", None
+ 'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
)

# check if all values are present
@@ -260,7 +260,7 @@ class InvenTreeConfig(AppConfig):
try:
with transaction.atomic():
if user.objects.filter(username=add_user).exists():
- logger.info("User %s already exists - skipping creation", add_user)
+ logger.info('User %s already exists - skipping creation', add_user)
else:
new_user = user.objects.create_superuser(
add_user, add_email, add_password
@@ -272,12 +272,12 @@ class InvenTreeConfig(AppConfig):
def add_user_from_file(self):
"""Add the superuser from a file."""
# stop if checks were already created
- if hasattr(settings, "USER_ADDED_FILE") and settings.USER_ADDED_FILE:
+ if hasattr(settings, 'USER_ADDED_FILE') and settings.USER_ADDED_FILE:
return

# get values
add_password_file = get_setting(
- "INVENTREE_ADMIN_PASSWORD_FILE", "admin_password_file", None
+ 'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
)

# no variable set -> do not try anything
@@ -296,7 +296,7 @@ class InvenTreeConfig(AppConfig):
self._create_admin_user(
get_setting('INVENTREE_ADMIN_USER', 'admin_user', 'admin'),
get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email', ''),
- add_password_file.read_text(encoding="utf-8"),
+ add_password_file.read_text(encoding='utf-8'),
)

# do not try again
@@ -63,9 +63,9 @@ class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
"""Look for all javascript files."""
n = 0

- print("Rendering javascript files...")
+ print('Rendering javascript files...')

n += self.download_files('translated', '/js/i18n')
n += self.download_files('dynamic', '/js/dynamic')

- print(f"Rendered {n} javascript files.")
+ print(f'Rendered {n} javascript files.')
@@ -99,9 +99,9 @@ def get_config_file(create=True) -> Path:
)
ensure_dir(cfg_filename.parent)

- cfg_template = base_dir.joinpath("config_template.yaml")
+ cfg_template = base_dir.joinpath('config_template.yaml')
shutil.copyfile(cfg_template, cfg_filename)
- print(f"Created config file {cfg_filename}")
+ print(f'Created config file {cfg_filename}')

return cfg_filename

@@ -293,14 +293,14 @@ def get_plugin_file():

if not plugin_file.exists():
logger.warning(
- "Plugin configuration file does not exist - creating default file"
+ 'Plugin configuration file does not exist - creating default file'
)
logger.info("Creating plugin file at '%s'", plugin_file)
ensure_dir(plugin_file.parent)

# If opening the file fails (no write permission, for example), then this will throw an error
plugin_file.write_text(
- "# InvenTree Plugins (uses PIP framework to install)\n\n"
+ '# InvenTree Plugins (uses PIP framework to install)\n\n'
)

return plugin_file
@@ -323,7 +323,7 @@ def get_secret_key():
"""
# Look for environment variable
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
- logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover
+ logger.info('SECRET_KEY loaded by INVENTREE_SECRET_KEY') # pragma: no cover
return secret_key

# Look for secret key file
@@ -331,7 +331,7 @@
secret_key_file = Path(secret_key_file).resolve()
else:
# Default location for secret key file
- secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve()
+ secret_key_file = get_base_dir().joinpath('secret_key.txt').resolve()

if not secret_key_file.exists():
logger.info("Generating random key file at '%s'", secret_key_file)
@@ -367,9 +367,9 @@ def get_custom_file(
static_storage = StaticFilesStorage()

if static_storage.exists(value):
- logger.info("Loading %s from %s directory: %s", log_ref, 'static', value)
+ logger.info('Loading %s from %s directory: %s', log_ref, 'static', value)
elif lookup_media and default_storage.exists(value):
- logger.info("Loading %s from %s directory: %s", log_ref, 'media', value)
+ logger.info('Loading %s from %s directory: %s', log_ref, 'media', value)
else:
add_dir_str = ' or media' if lookup_media else ''
logger.warning(
@@ -127,7 +127,7 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
if unit:
raise ValidationError(_(f'Could not convert {original} to {unit}'))
else:
- raise ValidationError(_("Invalid quantity supplied"))
+ raise ValidationError(_('Invalid quantity supplied'))

# Calculate the "magnitude" of the value, as a float
# If the value is specified strangely (e.g. as a fraction or a dozen), this can cause issues
@@ -30,22 +30,22 @@ def is_email_configured():

# Display warning unless in test mode
if not testing: # pragma: no cover
- logger.debug("EMAIL_HOST is not configured")
+ logger.debug('EMAIL_HOST is not configured')

# Display warning unless in test mode
if not settings.EMAIL_HOST_USER and not testing: # pragma: no cover
- logger.debug("EMAIL_HOST_USER is not configured")
+ logger.debug('EMAIL_HOST_USER is not configured')

# Display warning unless in test mode
if not settings.EMAIL_HOST_PASSWORD and testing: # pragma: no cover
- logger.debug("EMAIL_HOST_PASSWORD is not configured")
+ logger.debug('EMAIL_HOST_PASSWORD is not configured')

# Email sender must be configured
if not settings.DEFAULT_FROM_EMAIL:
configured = False

if not testing: # pragma: no cover
- logger.debug("DEFAULT_FROM_EMAIL is not configured")
+ logger.debug('DEFAULT_FROM_EMAIL is not configured')

return configured

@@ -75,7 +75,7 @@ def send_email(subject, body, recipients, from_email=None, html_message=None):
if settings.TESTING:
from_email = 'from@test.com'
else:
- logger.error("send_email failed: DEFAULT_FROM_EMAIL not specified")
+ logger.error('send_email failed: DEFAULT_FROM_EMAIL not specified')
return

InvenTree.tasks.offload_task(
@@ -86,7 +86,7 @@ def exception_handler(exc, context):
# If in DEBUG mode, provide error information in the response
error_detail = str(exc)
else:
- error_detail = _("Error details can be found in the admin panel")
+ error_detail = _('Error details can be found in the admin panel')

response_data = {
'error': type(exc).__name__,
@@ -18,7 +18,7 @@ class InvenTreeExchange(SimpleExchangeBackend):
Uses the plugin system to actually fetch the rates from an external API.
"""

- name = "InvenTreeExchange"
+ name = 'InvenTreeExchange'

def get_rates(self, **kwargs) -> None:
"""Set the requested currency codes and get rates."""
@@ -55,19 +55,19 @@ class InvenTreeExchange(SimpleExchangeBackend):
try:
rates = plugin.update_exchange_rates(base_currency, symbols)
except Exception as exc:
- logger.exception("Exchange rate update failed: %s", exc)
+ logger.exception('Exchange rate update failed: %s', exc)
return {}

if not rates:
logger.warning(
- "Exchange rate update failed - no data returned from plugin %s", slug
+ 'Exchange rate update failed - no data returned from plugin %s', slug
)
return {}

# Update exchange rates based on returned data
if type(rates) is not dict:
logger.warning(
- "Invalid exchange rate data returned from plugin %s (type %s)",
+ 'Invalid exchange rate data returned from plugin %s (type %s)',
slug,
type(rates),
)
@@ -82,7 +82,7 @@ class InvenTreeExchange(SimpleExchangeBackend):
def update_rates(self, base_currency=None, **kwargs):
"""Call to update all exchange rates"""
backend, _ = ExchangeBackend.objects.update_or_create(
- name=self.name, defaults={"base_currency": base_currency}
+ name=self.name, defaults={'base_currency': base_currency}
)

if base_currency is None:
@@ -91,7 +91,7 @@ class InvenTreeExchange(SimpleExchangeBackend):
symbols = currency_codes()

logger.info(
- "Updating exchange rates for %s (%s currencies)",
+ 'Updating exchange rates for %s (%s currencies)',
base_currency,
len(symbols),
)
@@ -110,7 +110,7 @@ class InvenTreeExchange(SimpleExchangeBackend):
])
else:
logger.info(
- "No exchange rates returned from backend - currencies not updated"
+ 'No exchange rates returned from backend - currencies not updated'
)

- logger.info("Updated exchange rates for %s", base_currency)
+ logger.info('Updated exchange rates for %s', base_currency)
@@ -76,7 +76,7 @@ class InvenTreeSearchFilter(filters.SearchFilter):

if whole:
# Wrap the search term to enable word-boundary matching
- term = r"\y" + term + r"\y"
+ term = r'\y' + term + r'\y'

terms.append(term)

@@ -64,7 +64,7 @@ def construct_format_regex(fmt_string: str) -> str:
Raises:
ValueError: Format string is invalid
"""
- pattern = "^"
+ pattern = '^'

for group in string.Formatter().parse(fmt_string):
prefix = group[0]  # Prefix (literal text appearing before this group)
@@ -87,7 +87,7 @@ def construct_format_regex(fmt_string: str) -> str:
':',
';',
'|',
- '\'',
+ "'",
'"',
]

@@ -115,9 +115,9 @@ def construct_format_regex(fmt_string: str) -> str:
# TODO: Introspect required width
w = '+'

- pattern += f"(?P<{name}>{chr}{w})"
+ pattern += f'(?P<{name}>{chr}{w})'

- pattern += "$"
+ pattern += '$'

return pattern

@@ -172,7 +172,7 @@ def extract_named_group(name: str, value: str, fmt_string: str) -> str:

if not result:
raise ValueError(
- _("Provided value does not match required pattern: ") + fmt_string
+ _('Provided value does not match required pattern: ') + fmt_string
)

# And return the value we are interested in
@@ -198,7 +198,7 @@ def format_money(money: Money, decimal_places: int = None, format: str = None) -
if format:
pattern = parse_pattern(format)
else:
- pattern = locale.currency_formats["standard"]
+ pattern = locale.currency_formats['standard']
if decimal_places is not None:
pattern.frac_prec = (decimal_places, decimal_places)

@@ -140,7 +140,7 @@ class SetPasswordForm(HelperForm):
)

old_password = forms.CharField(
- label=_("Old password"),
+ label=_('Old password'),
strip=False,
required=False,
widget=forms.PasswordInput(
@@ -178,23 +178,23 @@ class CustomSignupForm(SignupForm):

# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
- self.fields["email2"] = forms.EmailField(
- label=_("Email (again)"),
+ self.fields['email2'] = forms.EmailField(
+ label=_('Email (again)'),
widget=forms.TextInput(
attrs={
- "type": "email",
- "placeholder": _("Email address confirmation"),
+ 'type': 'email',
+ 'placeholder': _('Email address confirmation'),
}
),
)

# check for two password fields
if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
- self.fields.pop("password2")
+ self.fields.pop('password2')

# reorder fields
set_form_field_order(
- self, ["username", "email", "email2", "password1", "password2"]
+ self, ['username', 'email', 'email2', 'password1', 'password2']
)

def clean(self):
@@ -203,10 +203,10 @@ class CustomSignupForm(SignupForm):

# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
- email = cleaned_data.get("email")
- email2 = cleaned_data.get("email2")
+ email = cleaned_data.get('email')
+ email2 = cleaned_data.get('email2')
if (email and email2) and email != email2:
- self.add_error("email2", _("You must type the same email each time."))
+ self.add_error('email2', _('You must type the same email each time.'))

return cleaned_data

@@ -221,7 +221,7 @@ def registration_enabled():
return True
else:
logger.error(
- "Registration cannot be enabled, because EMAIL_HOST is not configured."
+ 'Registration cannot be enabled, because EMAIL_HOST is not configured.'
)
return False

@@ -292,7 +292,7 @@ class CustomUrlMixin:

def get_email_confirmation_url(self, request, emailconfirmation):
"""Custom email confirmation (activation) url."""
- url = reverse("account_confirm_email", args=[emailconfirmation.key])
+ url = reverse('account_confirm_email', args=[emailconfirmation.key])
return Site.objects.get_current().domain + url
@ -36,7 +36,7 @@ def generateTestKey(test_name):
|
||||
Tests must be named such that they will have unique keys.
|
||||
"""
|
||||
key = test_name.strip().lower()
|
||||
key = key.replace(" ", "")
|
||||
key = key.replace(' ', '')
|
||||
|
||||
# Remove any characters that cannot be used to represent a variable
|
||||
key = re.sub(r'[^a-zA-Z0-9]', '', key)
|
||||
@ -56,7 +56,7 @@ def constructPathString(path, max_chars=250):
|
||||
# Replace middle elements to limit the pathstring
|
||||
if len(pathstring) > max_chars:
|
||||
n = int(max_chars / 2 - 2)
|
||||
pathstring = pathstring[:n] + "..." + pathstring[-n:]
|
||||
pathstring = pathstring[:n] + '...' + pathstring[-n:]
|
||||
|
||||
return pathstring
|
||||
|
||||
@ -82,12 +82,12 @@ def TestIfImage(img):
|
||||
|
||||
def getBlankImage():
|
||||
"""Return the qualified path for the 'blank image' placeholder."""
|
||||
return getStaticUrl("img/blank_image.png")
|
||||
return getStaticUrl('img/blank_image.png')
|
||||
|
||||
|
||||
def getBlankThumbnail():
|
||||
"""Return the qualified path for the 'blank image' thumbnail placeholder."""
|
||||
return getStaticUrl("img/blank_image.thumbnail.png")
|
||||
return getStaticUrl('img/blank_image.thumbnail.png')
|
||||
|
||||
|
||||
def getLogoImage(as_file=False, custom=True):
|
||||
@ -105,13 +105,13 @@ def getLogoImage(as_file=False, custom=True):
|
||||
|
||||
if storage is not None:
|
||||
if as_file:
|
||||
return f"file://{storage.path(settings.CUSTOM_LOGO)}"
|
||||
return f'file://{storage.path(settings.CUSTOM_LOGO)}'
|
||||
return storage.url(settings.CUSTOM_LOGO)
|
||||
|
||||
# If we have got to this point, return the default logo
|
||||
if as_file:
|
||||
path = settings.STATIC_ROOT.joinpath('img/inventree.png')
|
||||
return f"file://{path}"
|
||||
return f'file://{path}'
|
||||
return getStaticUrl('img/inventree.png')
|
||||
|
||||
|
||||
@ -124,7 +124,7 @@ def getSplashScreen(custom=True):
|
||||
return static_storage.url(settings.CUSTOM_SPLASH)
|
||||
|
||||
# No custom splash screen
|
||||
return static_storage.url("img/inventree_splash.jpg")
|
||||
return static_storage.url('img/inventree_splash.jpg')
|
||||
|
||||
|
||||
def TestIfImageURL(url):
|
||||
@ -234,7 +234,7 @@ def increment(value):
|
||||
# Provide a default value if provided with a null input
|
||||
return '1'
|
||||
|
||||
pattern = r"(.*?)(\d+)?$"
|
||||
pattern = r'(.*?)(\d+)?$'
|
||||
|
||||
result = re.search(pattern, value)
|
||||
|
||||
@ -293,7 +293,7 @@ def decimal2string(d):
|
||||
if '.' not in s:
|
||||
return s
|
||||
|
||||
return s.rstrip("0").rstrip(".")
|
||||
return s.rstrip('0').rstrip('.')
|
||||
|
||||
|
||||
def decimal2money(d, currency=None):
|
||||
@ -395,7 +395,7 @@ def DownloadFile(
|
||||
length = len(bytes(data, response.charset))
|
||||
response['Content-Length'] = length
|
||||
|
||||
disposition = "inline" if inline else "attachment"
|
||||
disposition = 'inline' if inline else 'attachment'
|
||||
|
||||
response['Content-Disposition'] = f'{disposition}; filename={filename}'
|
||||
|
||||
@ -455,7 +455,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
try:
|
||||
expected_quantity = int(expected_quantity)
|
||||
except ValueError:
|
||||
raise ValidationError([_("Invalid quantity provided")])
|
||||
raise ValidationError([_('Invalid quantity provided')])
|
||||
|
||||
if input_string:
|
||||
input_string = str(input_string).strip()
|
||||
@ -463,7 +463,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
input_string = ''
|
||||
|
||||
if len(input_string) == 0:
|
||||
raise ValidationError([_("Empty serial number string")])
|
||||
raise ValidationError([_('Empty serial number string')])
|
||||
|
||||
next_value = increment_serial_number(starting_value)
|
||||
|
||||
@ -473,7 +473,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
next_value = increment_serial_number(next_value)
|
||||
|
||||
# Split input string by whitespace or comma (,) characters
|
||||
groups = re.split(r"[\s,]+", input_string)
|
||||
groups = re.split(r'[\s,]+', input_string)
|
||||
|
||||
serials = []
|
||||
errors = []
|
||||
@ -493,7 +493,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
return
|
||||
|
||||
if serial in serials:
|
||||
add_error(_("Duplicate serial") + f": {serial}")
|
||||
add_error(_('Duplicate serial') + f': {serial}')
|
||||
else:
|
||||
serials.append(serial)
|
||||
|
||||
@ -525,7 +525,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
|
||||
if a == b:
|
||||
# Invalid group
|
||||
add_error(_(f"Invalid group range: {group}"))
|
||||
add_error(_(f'Invalid group range: {group}'))
|
||||
continue
|
||||
|
||||
group_items = []
|
||||
@ -556,7 +556,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
if len(group_items) > remaining:
|
||||
add_error(
|
||||
_(
|
||||
f"Group range {group} exceeds allowed quantity ({expected_quantity})"
|
||||
f'Group range {group} exceeds allowed quantity ({expected_quantity})'
|
||||
)
|
||||
)
|
||||
elif (
|
||||
@ -568,7 +568,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
for item in group_items:
|
||||
add_serial(item)
|
||||
else:
|
||||
add_error(_(f"Invalid group range: {group}"))
|
||||
add_error(_(f'Invalid group range: {group}'))
|
||||
|
||||
else:
|
||||
# In the case of a different number of hyphens, simply add the entire group
|
||||
@ -586,14 +586,14 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
sequence_count = max(0, expected_quantity - len(serials))
|
||||
|
||||
if len(items) > 2 or len(items) == 0:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
continue
|
||||
elif len(items) == 2:
|
||||
try:
|
||||
if items[1]:
|
||||
sequence_count = int(items[1]) + 1
|
||||
except ValueError:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
continue
|
||||
|
||||
value = items[0]
|
||||
@ -612,7 +612,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
for item in sequence_items:
|
||||
add_serial(item)
|
||||
else:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
|
||||
else:
|
||||
# At this point, we assume that the 'group' is just a single serial value
|
||||
@ -622,12 +622,12 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
raise ValidationError(errors)
|
||||
|
||||
if len(serials) == 0:
|
||||
raise ValidationError([_("No serial numbers found")])
|
||||
raise ValidationError([_('No serial numbers found')])
|
||||
|
||||
if len(errors) == 0 and len(serials) != expected_quantity:
|
||||
raise ValidationError([
|
||||
_(
|
||||
f"Number of unique serial numbers ({len(serials)}) must match quantity ({expected_quantity})"
|
||||
f'Number of unique serial numbers ({len(serials)}) must match quantity ({expected_quantity})'
|
||||
)
|
||||
])
|
||||
|
||||
@ -666,7 +666,7 @@ def validateFilterString(value, model=None):
|
||||
pair = group.split('=')
|
||||
|
||||
if len(pair) != 2:
|
||||
raise ValidationError(f"Invalid group: {group}")
|
||||
raise ValidationError(f'Invalid group: {group}')
|
||||
|
||||
k, v = pair
|
||||
|
||||
@ -674,7 +674,7 @@ def validateFilterString(value, model=None):
|
||||
v = v.strip()
|
||||
|
||||
if not k or not v:
|
||||
raise ValidationError(f"Invalid group: {group}")
|
||||
raise ValidationError(f'Invalid group: {group}')
|
||||
|
||||
results[k] = v
|
||||
|
||||
@ -745,7 +745,7 @@ def strip_html_tags(value: str, raise_error=True, field_name=None):
|
||||
if len(cleaned) != len(value) and raise_error:
|
||||
field = field_name or 'non_field_errors'
|
||||
|
||||
raise ValidationError({field: [_("Remove HTML tags from this value")]})
|
||||
raise ValidationError({field: [_('Remove HTML tags from this value')]})
|
||||
|
||||
return cleaned
|
||||
|
||||
|
@ -120,7 +120,7 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
'INVENTREE_DOWNLOAD_FROM_URL_USER_AGENT'
|
||||
)
|
||||
if user_agent:
|
||||
headers = {"User-Agent": user_agent}
|
||||
headers = {'User-Agent': user_agent}
|
||||
else:
|
||||
headers = None
|
||||
|
||||
@ -135,28 +135,28 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
# Throw an error if anything goes wrong
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.ConnectionError as exc:
|
||||
raise Exception(_("Connection error") + f": {str(exc)}")
|
||||
raise Exception(_('Connection error') + f': {str(exc)}')
|
||||
except requests.exceptions.Timeout as exc:
|
||||
raise exc
|
||||
except requests.exceptions.HTTPError:
|
||||
raise requests.exceptions.HTTPError(
|
||||
_("Server responded with invalid status code") + f": {response.status_code}"
|
||||
_('Server responded with invalid status code') + f': {response.status_code}'
|
||||
)
|
||||
except Exception as exc:
|
||||
raise Exception(_("Exception occurred") + f": {str(exc)}")
|
||||
raise Exception(_('Exception occurred') + f': {str(exc)}')
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception(
|
||||
_("Server responded with invalid status code") + f": {response.status_code}"
|
||||
_('Server responded with invalid status code') + f': {response.status_code}'
|
||||
)
|
||||
|
||||
try:
|
||||
content_length = int(response.headers.get('Content-Length', 0))
|
||||
except ValueError:
|
||||
raise ValueError(_("Server responded with invalid Content-Length value"))
|
||||
raise ValueError(_('Server responded with invalid Content-Length value'))
|
||||
|
||||
if content_length > max_size:
|
||||
raise ValueError(_("Image size is too large"))
|
||||
raise ValueError(_('Image size is too large'))
|
||||
|
||||
# Download the file, ensuring we do not exceed the reported size
|
||||
file = io.BytesIO()
|
||||
@ -168,12 +168,12 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
dl_size += len(chunk)
|
||||
|
||||
if dl_size > max_size:
|
||||
raise ValueError(_("Image download exceeded maximum size"))
|
||||
raise ValueError(_('Image download exceeded maximum size'))
|
||||
|
||||
file.write(chunk)
|
||||
|
||||
if dl_size == 0:
|
||||
raise ValueError(_("Remote server returned empty response"))
|
||||
raise ValueError(_('Remote server returned empty response'))
|
||||
|
||||
# Now, attempt to convert the downloaded data to a valid image file
|
||||
# img.verify() will throw an exception if the image is not valid
|
||||
@ -181,7 +181,7 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
img = Image.open(file).convert()
|
||||
img.verify()
|
||||
except Exception:
|
||||
raise TypeError(_("Supplied URL is not a valid image file"))
|
||||
raise TypeError(_('Supplied URL is not a valid image file'))
|
||||
|
||||
return img
|
||||
|
||||
|
@ -18,13 +18,13 @@ def send_simple_login_email(user, link):
|
||||
"""Send an email with the login link to this user."""
|
||||
site = Site.objects.get_current()
|
||||
|
||||
context = {"username": user.username, "site_name": site.name, "link": link}
|
||||
context = {'username': user.username, 'site_name': site.name, 'link': link}
|
||||
email_plaintext_message = render_to_string(
|
||||
"InvenTree/user_simple_login.txt", context
|
||||
'InvenTree/user_simple_login.txt', context
|
||||
)
|
||||
|
||||
send_mail(
|
||||
_(f"[{site.name}] Log in to the app"),
|
||||
_(f'[{site.name}] Log in to the app'),
|
||||
email_plaintext_message,
|
||||
settings.DEFAULT_FROM_EMAIL,
|
||||
[user.email],
|
||||
@ -34,7 +34,7 @@ def send_simple_login_email(user, link):
|
||||
class GetSimpleLoginSerializer(serializers.Serializer):
|
||||
"""Serializer for the simple login view."""
|
||||
|
||||
email = serializers.CharField(label=_("Email"))
|
||||
email = serializers.CharField(label=_('Email'))
|
||||
|
||||
|
||||
class GetSimpleLoginView(APIView):
|
||||
@ -47,14 +47,14 @@ class GetSimpleLoginView(APIView):
|
||||
"""Get the token for the current user or fail."""
|
||||
serializer = self.serializer_class(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.email_submitted(email=serializer.data["email"])
|
||||
return Response({"status": "ok"})
|
||||
self.email_submitted(email=serializer.data['email'])
|
||||
return Response({'status': 'ok'})
|
||||
|
||||
def email_submitted(self, email):
|
||||
"""Notify user about link."""
|
||||
user = self.get_user(email)
|
||||
if user is None:
|
||||
print("user not found:", email)
|
||||
print('user not found:', email)
|
||||
return
|
||||
link = self.create_link(user)
|
||||
send_simple_login_email(user, link)
|
||||
@ -68,7 +68,7 @@ class GetSimpleLoginView(APIView):
|
||||
|
||||
def create_link(self, user):
|
||||
"""Create a login link for this user."""
|
||||
link = reverse("sesame-login")
|
||||
link = reverse('sesame-login')
|
||||
link = self.request.build_absolute_uri(link)
|
||||
link += sesame.utils.get_query_string(user)
|
||||
return link
|
||||
|
@ -12,7 +12,7 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Cleanup old (undefined) settings in the database."""
|
||||
logger.info("Collecting settings")
|
||||
logger.info('Collecting settings')
|
||||
from common.models import InvenTreeSetting, InvenTreeUserSetting
|
||||
|
||||
# general settings
|
||||
@ -35,4 +35,4 @@ class Command(BaseCommand):
|
||||
setting.delete()
|
||||
logger.info("deleted user setting '%s'", setting.key)
|
||||
|
||||
logger.info("checked all settings")
|
||||
logger.info('checked all settings')
|
||||
|
@ -58,10 +58,10 @@ class Command(BaseCommand):
|
||||
for file in os.listdir(SOURCE_DIR):
|
||||
path = os.path.join(SOURCE_DIR, file)
|
||||
if os.path.exists(path) and os.path.isfile(path):
|
||||
print(f"render {file}")
|
||||
print(f'render {file}')
|
||||
render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Using multi-level directories is not implemented at this point'
|
||||
) # TODO multilevel dir if needed
|
||||
print(f"rendered all files in {SOURCE_DIR}")
|
||||
print(f'rendered all files in {SOURCE_DIR}')
|
||||
|
@ -13,50 +13,50 @@ class Command(BaseCommand):
|
||||
"""Rebuild all database models which leverage the MPTT structure."""
|
||||
# Part model
|
||||
try:
|
||||
print("Rebuilding Part objects")
|
||||
print('Rebuilding Part objects')
|
||||
|
||||
from part.models import Part
|
||||
|
||||
Part.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Part objects")
|
||||
print('Error rebuilding Part objects')
|
||||
|
||||
# Part category
|
||||
try:
|
||||
print("Rebuilding PartCategory objects")
|
||||
print('Rebuilding PartCategory objects')
|
||||
|
||||
from part.models import PartCategory
|
||||
|
||||
PartCategory.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding PartCategory objects")
|
||||
print('Error rebuilding PartCategory objects')
|
||||
|
||||
# StockItem model
|
||||
try:
|
||||
print("Rebuilding StockItem objects")
|
||||
print('Rebuilding StockItem objects')
|
||||
|
||||
from stock.models import StockItem
|
||||
|
||||
StockItem.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockItem objects")
|
||||
print('Error rebuilding StockItem objects')
|
||||
|
||||
# StockLocation model
|
||||
try:
|
||||
print("Rebuilding StockLocation objects")
|
||||
print('Rebuilding StockLocation objects')
|
||||
|
||||
from stock.models import StockLocation
|
||||
|
||||
StockLocation.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockLocation objects")
|
||||
print('Error rebuilding StockLocation objects')
|
||||
|
||||
# Build model
|
||||
try:
|
||||
print("Rebuilding Build objects")
|
||||
print('Rebuilding Build objects')
|
||||
|
||||
from build.models import Build
|
||||
|
||||
Build.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Build objects")
|
||||
print('Error rebuilding Build objects')
|
||||
|
@ -37,20 +37,20 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Rebuild all thumbnail images."""
|
||||
logger.info("Rebuilding Part thumbnails")
|
||||
logger.info('Rebuilding Part thumbnails')
|
||||
|
||||
for part in Part.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(part)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.exception("ERROR: Database read error.")
|
||||
logger.exception('ERROR: Database read error.')
|
||||
break
|
||||
|
||||
logger.info("Rebuilding Company thumbnails")
|
||||
logger.info('Rebuilding Company thumbnails')
|
||||
|
||||
for company in Company.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(company)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.exception("ERROR: abase read error.")
|
||||
logger.exception('ERROR: abase read error.')
|
||||
break
|
||||
|
@ -12,7 +12,7 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Wait till the database is ready."""
|
||||
self.stdout.write("Waiting for database...")
|
||||
self.stdout.write('Waiting for database...')
|
||||
|
||||
connected = False
|
||||
|
||||
@ -25,12 +25,12 @@ class Command(BaseCommand):
|
||||
connected = True
|
||||
|
||||
except OperationalError as e:
|
||||
self.stdout.write(f"Could not connect to database: {e}")
|
||||
self.stdout.write(f'Could not connect to database: {e}')
|
||||
except ImproperlyConfigured as e:
|
||||
self.stdout.write(f"Improperly configured: {e}")
|
||||
self.stdout.write(f'Improperly configured: {e}')
|
||||
else:
|
||||
if not connection.is_usable():
|
||||
self.stdout.write("Database configuration is not usable")
|
||||
self.stdout.write('Database configuration is not usable')
|
||||
|
||||
if connected:
|
||||
self.stdout.write("Database connection successful!")
|
||||
self.stdout.write('Database connection successful!')
|
||||
|
@ -69,7 +69,7 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
metadata['model'] = tbl_label
|
||||
|
||||
table = f"{app_label}_{tbl_label}"
|
||||
table = f'{app_label}_{tbl_label}'
|
||||
|
||||
actions = metadata.get('actions', None)
|
||||
|
||||
@ -87,7 +87,7 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
}
|
||||
|
||||
# let the view define a custom rolemap
|
||||
if hasattr(view, "rolemap"):
|
||||
if hasattr(view, 'rolemap'):
|
||||
rolemap.update(view.rolemap)
|
||||
|
||||
# Remove any HTTP methods that the user does not have permission for
|
||||
@ -264,7 +264,7 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
model = field.queryset.model
|
||||
else:
|
||||
logger.debug(
|
||||
"Could not extract model for:", field_info.get('label'), '->', field
|
||||
'Could not extract model for:', field_info.get('label'), '->', field
|
||||
)
|
||||
model = None
|
||||
|
||||
@ -286,4 +286,4 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
return field_info
|
||||
|
||||
|
||||
InvenTreeMetadata.label_lookup[DependentField] = "dependent field"
|
||||
InvenTreeMetadata.label_lookup[DependentField] = 'dependent field'
|
||||
|
@ -15,7 +15,7 @@ from error_report.middleware import ExceptionProcessor
|
||||
from InvenTree.urls import frontendpatterns
|
||||
from users.models import ApiToken
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class AuthRequiredMiddleware(object):
|
||||
@ -91,7 +91,7 @@ class AuthRequiredMiddleware(object):
|
||||
authorized = True
|
||||
|
||||
except ApiToken.DoesNotExist:
|
||||
logger.warning("Access denied for unknown token %s", token_key)
|
||||
logger.warning('Access denied for unknown token %s', token_key)
|
||||
|
||||
# No authorization was found for the request
|
||||
if not authorized:
|
||||
|
@ -303,7 +303,7 @@ class ReferenceIndexingMixin(models.Model):
|
||||
if recent:
|
||||
reference = recent.reference
|
||||
else:
|
||||
reference = ""
|
||||
reference = ''
|
||||
|
||||
return reference
|
||||
|
||||
@ -316,20 +316,20 @@ class ReferenceIndexingMixin(models.Model):
|
||||
info = InvenTree.format.parse_format_string(pattern)
|
||||
except Exception as exc:
|
||||
raise ValidationError({
|
||||
"value": _("Improperly formatted pattern") + ": " + str(exc)
|
||||
'value': _('Improperly formatted pattern') + ': ' + str(exc)
|
||||
})
|
||||
|
||||
# Check that only 'allowed' keys are provided
|
||||
for key in info.keys():
|
||||
if key not in ctx.keys():
|
||||
raise ValidationError({
|
||||
"value": _("Unknown format key specified") + f": '{key}'"
|
||||
'value': _('Unknown format key specified') + f": '{key}'"
|
||||
})
|
||||
|
||||
# Check that the 'ref' variable is specified
|
||||
if 'ref' not in info.keys():
|
||||
raise ValidationError({
|
||||
'value': _("Missing required format key") + ": 'ref'"
|
||||
'value': _('Missing required format key') + ": 'ref'"
|
||||
})
|
||||
|
||||
@classmethod
|
||||
@ -340,7 +340,7 @@ class ReferenceIndexingMixin(models.Model):
|
||||
value = str(value).strip()
|
||||
|
||||
if len(value) == 0:
|
||||
raise ValidationError(_("Reference field cannot be empty"))
|
||||
raise ValidationError(_('Reference field cannot be empty'))
|
||||
|
||||
# An 'empty' pattern means no further validation is required
|
||||
if not pattern:
|
||||
@ -348,7 +348,7 @@ class ReferenceIndexingMixin(models.Model):
|
||||
|
||||
if not InvenTree.format.validate_string(value, pattern):
|
||||
raise ValidationError(
|
||||
_("Reference must match required pattern") + ": " + pattern
|
||||
_('Reference must match required pattern') + ': ' + pattern
|
||||
)
|
||||
|
||||
# Check that the reference field can be rebuild
|
||||
@ -380,7 +380,7 @@ class ReferenceIndexingMixin(models.Model):
|
||||
|
||||
if validate:
|
||||
if reference_int > models.BigIntegerField.MAX_BIGINT:
|
||||
raise ValidationError({"reference": _("Reference number is too large")})
|
||||
raise ValidationError({'reference': _('Reference number is too large')})
|
||||
|
||||
return reference_int
|
||||
|
||||
@ -399,7 +399,7 @@ def extract_int(reference, clip=0x7FFFFFFF, allow_negative=False):
|
||||
return 0
|
||||
|
||||
# Look at the start of the string - can it be "integerized"?
|
||||
result = re.match(r"^(\d+)", reference)
|
||||
result = re.match(r'^(\d+)', reference)
|
||||
|
||||
if result and len(result.groups()) == 1:
|
||||
ref = result.groups()[0]
|
||||
@ -455,7 +455,7 @@ class InvenTreeAttachment(models.Model):
|
||||
|
||||
Note: Re-implement this for each subclass of InvenTreeAttachment
|
||||
"""
|
||||
return "attachments"
|
||||
return 'attachments'
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Provide better validation error."""
|
||||
@ -547,7 +547,7 @@ class InvenTreeAttachment(models.Model):
|
||||
logger.error(
|
||||
"Attempted to rename attachment outside valid directory: '%s'", new_file
|
||||
)
|
||||
raise ValidationError(_("Invalid attachment directory"))
|
||||
raise ValidationError(_('Invalid attachment directory'))
|
||||
|
||||
# Ignore further checks if the filename is not actually being renamed
|
||||
if new_file == old_file:
|
||||
@ -556,23 +556,23 @@ class InvenTreeAttachment(models.Model):
|
||||
forbidden = [
|
||||
"'",
|
||||
'"',
|
||||
"#",
|
||||
"@",
|
||||
"!",
|
||||
"&",
|
||||
"^",
|
||||
"<",
|
||||
">",
|
||||
":",
|
||||
";",
|
||||
"/",
|
||||
"\\",
|
||||
"|",
|
||||
"?",
|
||||
"*",
|
||||
"%",
|
||||
"~",
|
||||
"`",
|
||||
'#',
|
||||
'@',
|
||||
'!',
|
||||
'&',
|
||||
'^',
|
||||
'<',
|
||||
'>',
|
||||
':',
|
||||
';',
|
||||
'/',
|
||||
'\\',
|
||||
'|',
|
||||
'?',
|
||||
'*',
|
||||
'%',
|
||||
'~',
|
||||
'`',
|
||||
]
|
||||
|
||||
for c in forbidden:
|
||||
@ -580,7 +580,7 @@ class InvenTreeAttachment(models.Model):
|
||||
raise ValidationError(_(f"Filename contains illegal character '{c}'"))
|
||||
|
||||
if len(fn.split('.')) < 2:
|
||||
raise ValidationError(_("Filename missing extension"))
|
||||
raise ValidationError(_('Filename missing extension'))
|
||||
|
||||
if not old_file.exists():
|
||||
logger.error(
|
||||
@ -589,14 +589,14 @@ class InvenTreeAttachment(models.Model):
|
||||
return
|
||||
|
||||
if new_file.exists():
|
||||
raise ValidationError(_("Attachment with this filename already exists"))
|
||||
raise ValidationError(_('Attachment with this filename already exists'))
|
||||
|
||||
try:
|
||||
os.rename(old_file, new_file)
|
||||
self.attachment.name = os.path.join(self.getSubdir(), fn)
|
||||
self.save()
|
||||
except Exception:
|
||||
raise ValidationError(_("Error renaming file"))
|
||||
raise ValidationError(_('Error renaming file'))
|
||||
|
||||
def fully_qualified_url(self):
|
||||
"""Return a 'fully qualified' URL for this attachment.
|
||||
@ -656,7 +656,7 @@ class InvenTreeTree(MPTTModel):
|
||||
except self.__class__.DoesNotExist:
|
||||
# If the object no longer exists, raise a ValidationError
|
||||
raise ValidationError(
|
||||
"Object %s of type %s no longer exists", str(self), str(self.__class__)
|
||||
'Object %s of type %s no longer exists', str(self), str(self.__class__)
|
||||
)
|
||||
|
||||
# Cache node ID values for lower nodes, before we delete this one
|
||||
@ -791,7 +791,7 @@ class InvenTreeTree(MPTTModel):
|
||||
super().save(*args, **kwargs)
|
||||
except InvalidMove:
|
||||
# Provide better error for parent selection
|
||||
raise ValidationError({'parent': _("Invalid choice")})
|
||||
raise ValidationError({'parent': _('Invalid choice')})
|
||||
|
||||
# Re-calculate the 'pathstring' field
|
||||
pathstring = self.construct_pathstring()
|
||||
@ -821,14 +821,14 @@ class InvenTreeTree(MPTTModel):
|
||||
self.__class__.objects.bulk_update(nodes_to_update, ['pathstring'])
|
||||
|
||||
name = models.CharField(
|
||||
blank=False, max_length=100, verbose_name=_("Name"), help_text=_("Name")
|
||||
blank=False, max_length=100, verbose_name=_('Name'), help_text=_('Name')
|
||||
)
|
||||
|
||||
description = models.CharField(
|
||||
blank=True,
|
||||
max_length=250,
|
||||
verbose_name=_("Description"),
|
||||
help_text=_("Description (optional)"),
|
||||
verbose_name=_('Description'),
|
||||
help_text=_('Description (optional)'),
|
||||
)
|
||||
|
||||
# When a category is deleted, graft the children onto its parent
|
||||
@ -837,7 +837,7 @@ class InvenTreeTree(MPTTModel):
|
||||
on_delete=models.DO_NOTHING,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("parent"),
|
||||
verbose_name=_('parent'),
|
||||
related_name='children',
|
||||
)
|
||||
|
||||
@ -854,7 +854,7 @@ class InvenTreeTree(MPTTModel):
|
||||
|
||||
The default implementation returns an empty list
|
||||
"""
|
||||
raise NotImplementedError(f"items() method not implemented for {type(self)}")
|
||||
raise NotImplementedError(f'items() method not implemented for {type(self)}')
|
||||
|
||||
def getUniqueParents(self):
|
||||
"""Return a flat set of all parent items that exist above this node.
|
||||
@ -929,7 +929,7 @@ class InvenTreeTree(MPTTModel):
|
||||
|
||||
def __str__(self):
|
||||
"""String representation of a category is the full path to that category."""
|
||||
return f"{self.pathstring} - {self.description}"
|
||||
return f'{self.pathstring} - {self.description}'
|
||||
|
||||
|
||||
class InvenTreeNotesMixin(models.Model):
|
||||
@ -1008,7 +1008,7 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
|
||||
if hasattr(self, 'get_api_url'):
|
||||
api_url = self.get_api_url()
|
||||
data['api_url'] = f"{api_url}{self.pk}/"
|
||||
data['api_url'] = f'{api_url}{self.pk}/'
|
||||
|
||||
if hasattr(self, 'get_absolute_url'):
|
||||
data['web_url'] = self.get_absolute_url()
|
||||
@ -1040,7 +1040,7 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
# Check for existing item
|
||||
if self.__class__.lookup_barcode(barcode_hash) is not None:
|
||||
if raise_error:
|
||||
raise ValidationError(_("Existing barcode found"))
|
||||
raise ValidationError(_('Existing barcode found'))
|
||||
else:
|
||||
return False
|
||||
|
||||
|
@ -18,7 +18,7 @@ def get_model_for_view(view, raise_error=True):
|
||||
if hasattr(view, 'get_serializer_class'):
|
||||
return view.get_serializr_class().Meta.model
|
||||
|
||||
raise AttributeError(f"Serializer class not specified for {view.__class__}")
|
||||
raise AttributeError(f'Serializer class not specified for {view.__class__}')
|
||||
|
||||
|
||||
class RolePermission(permissions.BasePermission):
|
||||
@ -62,7 +62,7 @@ class RolePermission(permissions.BasePermission):
|
||||
}
|
||||
|
||||
# let the view define a custom rolemap
|
||||
if hasattr(view, "rolemap"):
|
||||
if hasattr(view, 'rolemap'):
|
||||
rolemap.update(view.rolemap)
|
||||
|
||||
permission = rolemap[request.method]
|
||||
@ -78,7 +78,7 @@ class RolePermission(permissions.BasePermission):
|
||||
app_label = model._meta.app_label
|
||||
model_name = model._meta.model_name
|
||||
|
||||
table = f"{app_label}_{model_name}"
|
||||
table = f'{app_label}_{model_name}'
|
||||
except AttributeError:
|
||||
# We will assume that if the serializer class does *not* have a Meta,
|
||||
# then we don't need a permission
|
||||
|
@ -25,8 +25,8 @@ def isInMainThread():
|
||||
- The RUN_MAIN env is set in that case. However if --noreload is applied, this variable
|
||||
is not set because there are no different threads.
|
||||
"""
|
||||
if "runserver" in sys.argv and "--noreload" not in sys.argv:
|
||||
return os.environ.get('RUN_MAIN', None) == "true"
|
||||
if 'runserver' in sys.argv and '--noreload' not in sys.argv:
|
||||
return os.environ.get('RUN_MAIN', None) == 'true'
|
||||
|
||||
return True
|
||||
|
||||
|
@ -37,7 +37,7 @@ def sentry_ignore_errors():
|
||||
|
||||
def init_sentry(dsn, sample_rate, tags):
|
||||
"""Initialize sentry.io error reporting"""
|
||||
logger.info("Initializing sentry.io integration")
|
||||
logger.info('Initializing sentry.io integration')
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=dsn,
|
||||
@ -65,9 +65,9 @@ def report_exception(exc):
|
||||
"""Report an exception to sentry.io"""
|
||||
if settings.SENTRY_ENABLED and settings.SENTRY_DSN:
|
||||
if not any(isinstance(exc, e) for e in sentry_ignore_errors()):
|
||||
logger.info("Reporting exception to sentry.io: %s", exc)
|
||||
logger.info('Reporting exception to sentry.io: %s', exc)
|
||||
|
||||
try:
|
||||
sentry_sdk.capture_exception(exc)
|
||||
except Exception:
|
||||
logger.warning("Failed to report exception to sentry.io")
|
||||
logger.warning('Failed to report exception to sentry.io')
|
||||
|
@ -37,9 +37,9 @@ class InvenTreeMoneySerializer(MoneyField):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Override default values."""
|
||||
kwargs["max_digits"] = kwargs.get("max_digits", 19)
|
||||
self.decimal_places = kwargs["decimal_places"] = kwargs.get("decimal_places", 6)
|
||||
kwargs["required"] = kwargs.get("required", False)
|
||||
kwargs['max_digits'] = kwargs.get('max_digits', 19)
|
||||
self.decimal_places = kwargs['decimal_places'] = kwargs.get('decimal_places', 6)
|
||||
kwargs['required'] = kwargs.get('required', False)
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@ -57,7 +57,7 @@ class InvenTreeMoneySerializer(MoneyField):
|
||||
amount = Decimal(amount)
|
||||
amount = round(amount, self.decimal_places)
|
||||
except Exception:
|
||||
raise ValidationError({self.field_name: [_("Must be a valid number")]})
|
||||
raise ValidationError({self.field_name: [_('Must be a valid number')]})
|
||||
|
||||
currency = data.get(
|
||||
get_currency_field_name(self.field_name), self.default_currency
|
||||
@ -134,7 +134,7 @@ class DependentField(serializers.Field):
|
||||
|
||||
def get_child(self, raise_exception=False):
|
||||
"""This method tries to extract the child based on the provided data in the request by the client."""
|
||||
data = deepcopy(self.context["request"].data)
|
||||
data = deepcopy(self.context['request'].data)
|
||||
|
||||
def visit_parent(node):
|
||||
"""Recursively extract the data for the parent field/serializer in reverse."""
|
||||
@ -144,7 +144,7 @@ class DependentField(serializers.Field):
|
||||
visit_parent(node.parent)
|
||||
|
||||
# only do for composite fields and stop right before the current field
|
||||
if hasattr(node, "child") and node is not self and isinstance(data, dict):
|
||||
if hasattr(node, 'child') and node is not self and isinstance(data, dict):
|
||||
data = data.get(node.field_name, None)
|
||||
|
||||
visit_parent(self)
|
||||
@ -424,7 +424,7 @@ class ExendedUserSerializer(UserSerializer):
|
||||
pass
|
||||
else:
|
||||
raise PermissionDenied(
|
||||
_("You do not have permission to change this user role.")
|
||||
_('You do not have permission to change this user role.')
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
@ -436,7 +436,7 @@ class UserCreateSerializer(ExendedUserSerializer):
|
||||
"""Expanded valiadation for auth."""
|
||||
# Check that the user trying to create a new user is a superuser
|
||||
if not self.context['request'].user.is_superuser:
|
||||
raise serializers.ValidationError(_("Only superusers can create new users"))
|
||||
raise serializers.ValidationError(_('Only superusers can create new users'))
|
||||
|
||||
# Generate a random password
|
||||
password = User.objects.make_random_password(length=14)
|
||||
@ -453,9 +453,9 @@ class UserCreateSerializer(ExendedUserSerializer):
|
||||
current_site = Site.objects.get_current()
|
||||
domain = current_site.domain
|
||||
instance.email_user(
|
||||
subject=_(f"Welcome to {current_site.name}"),
|
||||
subject=_(f'Welcome to {current_site.name}'),
|
||||
message=_(
|
||||
f"Your account has been created.\n\nPlease use the password reset function to get access (at https://{domain})."
|
||||
f'Your account has been created.\n\nPlease use the password reset function to get access (at https://{domain}).'
|
||||
),
|
||||
)
|
||||
return instance
|
||||
@ -551,7 +551,7 @@ class InvenTreeDecimalField(serializers.FloatField):
|
||||
try:
|
||||
return Decimal(str(data))
|
||||
except Exception:
|
||||
raise serializers.ValidationError(_("Invalid value"))
|
||||
raise serializers.ValidationError(_('Invalid value'))
|
||||
|
||||
|
||||
class DataFileUploadSerializer(serializers.Serializer):
|
||||
@ -571,8 +571,8 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
fields = ['data_file']
|
||||
|
||||
data_file = serializers.FileField(
|
||||
label=_("Data File"),
|
||||
help_text=_("Select data file for upload"),
|
||||
label=_('Data File'),
|
||||
help_text=_('Select data file for upload'),
|
||||
required=True,
|
||||
allow_empty_file=False,
|
||||
)
|
||||
@ -589,13 +589,13 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
accepted_file_types = ['xls', 'xlsx', 'csv', 'tsv', 'xml']
|
||||
|
||||
if ext not in accepted_file_types:
|
||||
raise serializers.ValidationError(_("Unsupported file type"))
|
||||
raise serializers.ValidationError(_('Unsupported file type'))
|
||||
|
||||
# Impose a 50MB limit on uploaded BOM files
|
||||
max_upload_file_size = 50 * 1024 * 1024
|
||||
|
||||
if data_file.size > max_upload_file_size:
|
||||
raise serializers.ValidationError(_("File is too large"))
|
||||
raise serializers.ValidationError(_('File is too large'))
|
||||
|
||||
# Read file data into memory (bytes object)
|
||||
try:
|
||||
@ -616,10 +616,10 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
raise serializers.ValidationError(str(e))
|
||||
|
||||
if len(self.dataset.headers) == 0:
|
||||
raise serializers.ValidationError(_("No columns found in file"))
|
||||
raise serializers.ValidationError(_('No columns found in file'))
|
||||
|
||||
if len(self.dataset) == 0:
|
||||
raise serializers.ValidationError(_("No data rows found in file"))
|
||||
raise serializers.ValidationError(_('No data rows found in file'))
|
||||
|
||||
return data_file
|
||||
|
||||
@ -732,10 +732,10 @@ class DataFileExtractSerializer(serializers.Serializer):
self.rows = data.get('rows', [])

if len(self.rows) == 0:
raise serializers.ValidationError(_("No data rows provided"))
raise serializers.ValidationError(_('No data rows provided'))

if len(self.columns) == 0:
raise serializers.ValidationError(_("No data columns supplied"))
raise serializers.ValidationError(_('No data columns supplied'))

self.validate_extracted_columns()

@ -758,7 +758,7 @@ class DataFileExtractSerializer(serializers.Serializer):
processed_row = self.process_row(self.row_to_dict(row))

if processed_row:
rows.append({"original": row, "data": processed_row})
rows.append({'original': row, 'data': processed_row})

return {'fields': model_fields, 'columns': self.columns, 'rows': rows}

@ -834,8 +834,8 @@ class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
required=False,
allow_blank=False,
write_only=True,
label=_("Remote Image"),
help_text=_("URL of remote image file"),
label=_('Remote Image'),
help_text=_('URL of remote image file'),
)

def validate_remote_image(self, url):
@ -851,7 +851,7 @@ class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
'INVENTREE_DOWNLOAD_FROM_URL'
):
raise ValidationError(
_("Downloading images from remote URL is not enabled")
_('Downloading images from remote URL is not enabled')
)

try:

@ -52,7 +52,7 @@ if TESTING:
site_packages = '/usr/local/lib/python3.9/site-packages'

if site_packages not in sys.path:
print("Adding missing site-packages path:", site_packages)
print('Adding missing site-packages path:', site_packages)
sys.path.append(site_packages)

# Are environment variables manipulated by tests? Needs to be set by testing code
@ -87,7 +87,7 @@ ENABLE_PLATFORM_FRONTEND = get_boolean_setting(
# Configure logging settings
log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING')

logging.basicConfig(level=log_level, format="%(asctime)s %(levelname)s %(message)s")
logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s %(message)s')

if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
log_level = 'WARNING' # pragma: no cover
@ -109,7 +109,7 @@ if get_setting('INVENTREE_DB_LOGGING', 'db_logging', False):
LOGGING['loggers'] = {'django.db.backends': {'level': log_level or 'DEBUG'}}

# Get a logger instance for this setup file
logger = logging.getLogger("inventree")
logger = logging.getLogger('inventree')

# Load SECRET_KEY
SECRET_KEY = config.get_secret_key()
@ -122,7 +122,7 @@ MEDIA_ROOT = config.get_media_dir()

# List of allowed hosts (default = allow all)
ALLOWED_HOSTS = get_setting(
"INVENTREE_ALLOWED_HOSTS",
'INVENTREE_ALLOWED_HOSTS',
config_key='allowed_hosts',
default_value=['*'],
typecast=list,
@ -135,11 +135,11 @@ CORS_URLS_REGEX = r'^/(api|media|static)/.*$'

# Extract CORS options from configuration file
CORS_ORIGIN_ALLOW_ALL = get_boolean_setting(
"INVENTREE_CORS_ORIGIN_ALLOW_ALL", config_key='cors.allow_all', default_value=False
'INVENTREE_CORS_ORIGIN_ALLOW_ALL', config_key='cors.allow_all', default_value=False
)

CORS_ORIGIN_WHITELIST = get_setting(
"INVENTREE_CORS_ORIGIN_WHITELIST",
'INVENTREE_CORS_ORIGIN_WHITELIST',
config_key='cors.whitelist',
default_value=[],
typecast=list,
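All of the settings touched in this file go through the same lookup chain: environment variable first, then the config.yaml key, then the supplied default. A simplified illustration of that order (this is a sketch only; the real get_setting lives in InvenTree's config module and handles more, e.g. comma-separated list values for typecast=list):

import os

def get_setting(env_var, config_key=None, default_value=None, typecast=None):
    # Simplified illustration only - not the actual implementation
    value = os.environ.get(env_var)

    if value is None and config_key is not None:
        value = CONFIG.get(config_key)  # CONFIG = parsed config.yaml, assumed in scope

    if value is None:
        value = default_value

    # (the real helper also applies 'typecast', including splitting
    # comma-separated environment strings when typecast=list)
    return value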
@ -279,43 +279,43 @@ AUTHENTICATION_BACKENDS = CONFIG.get(
|
||||
'django.contrib.auth.backends.RemoteUserBackend', # proxy login
|
||||
'django.contrib.auth.backends.ModelBackend',
|
||||
'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers
|
||||
"sesame.backends.ModelBackend", # Magic link login django-sesame
|
||||
'sesame.backends.ModelBackend', # Magic link login django-sesame
|
||||
],
|
||||
)
|
||||
|
||||
# LDAP support
|
||||
LDAP_AUTH = get_boolean_setting("INVENTREE_LDAP_ENABLED", "ldap.enabled", False)
|
||||
LDAP_AUTH = get_boolean_setting('INVENTREE_LDAP_ENABLED', 'ldap.enabled', False)
|
||||
if LDAP_AUTH:
|
||||
import ldap
|
||||
from django_auth_ldap.config import GroupOfUniqueNamesType, LDAPSearch
|
||||
|
||||
AUTHENTICATION_BACKENDS.append("django_auth_ldap.backend.LDAPBackend")
|
||||
AUTHENTICATION_BACKENDS.append('django_auth_ldap.backend.LDAPBackend')
|
||||
|
||||
# debug mode to troubleshoot configuration
|
||||
LDAP_DEBUG = get_boolean_setting("INVENTREE_LDAP_DEBUG", "ldap.debug", False)
|
||||
LDAP_DEBUG = get_boolean_setting('INVENTREE_LDAP_DEBUG', 'ldap.debug', False)
|
||||
if LDAP_DEBUG:
|
||||
if "loggers" not in LOGGING:
|
||||
LOGGING["loggers"] = {}
|
||||
LOGGING["loggers"]["django_auth_ldap"] = {
|
||||
"level": "DEBUG",
|
||||
"handlers": ["console"],
|
||||
if 'loggers' not in LOGGING:
|
||||
LOGGING['loggers'] = {}
|
||||
LOGGING['loggers']['django_auth_ldap'] = {
|
||||
'level': 'DEBUG',
|
||||
'handlers': ['console'],
|
||||
}
|
||||
|
||||
# get global options from dict and use ldap.OPT_* as keys and values
|
||||
global_options_dict = get_setting(
|
||||
"INVENTREE_LDAP_GLOBAL_OPTIONS", "ldap.global_options", {}, dict
|
||||
'INVENTREE_LDAP_GLOBAL_OPTIONS', 'ldap.global_options', {}, dict
|
||||
)
|
||||
global_options = {}
|
||||
for k, v in global_options_dict.items():
|
||||
# keys are always ldap.OPT_* constants
|
||||
k_attr = getattr(ldap, k, None)
|
||||
if not k.startswith("OPT_") or k_attr is None:
|
||||
if not k.startswith('OPT_') or k_attr is None:
|
||||
print(f"[LDAP] ldap.global_options, key '{k}' not found, skipping...")
|
||||
continue
|
||||
|
||||
# values can also be other strings, e.g. paths
|
||||
v_attr = v
|
||||
if v.startswith("OPT_"):
|
||||
if v.startswith('OPT_'):
|
||||
v_attr = getattr(ldap, v, None)
|
||||
|
||||
if v_attr is None:
|
||||
@ -325,55 +325,55 @@ if LDAP_AUTH:
|
||||
global_options[k_attr] = v_attr
|
||||
AUTH_LDAP_GLOBAL_OPTIONS = global_options
|
||||
if LDAP_DEBUG:
|
||||
print("[LDAP] ldap.global_options =", global_options)
|
||||
print('[LDAP] ldap.global_options =', global_options)
|
||||
|
||||
AUTH_LDAP_SERVER_URI = get_setting("INVENTREE_LDAP_SERVER_URI", "ldap.server_uri")
|
||||
AUTH_LDAP_SERVER_URI = get_setting('INVENTREE_LDAP_SERVER_URI', 'ldap.server_uri')
|
||||
AUTH_LDAP_START_TLS = get_boolean_setting(
|
||||
"INVENTREE_LDAP_START_TLS", "ldap.start_tls", False
|
||||
'INVENTREE_LDAP_START_TLS', 'ldap.start_tls', False
|
||||
)
|
||||
AUTH_LDAP_BIND_DN = get_setting("INVENTREE_LDAP_BIND_DN", "ldap.bind_dn")
|
||||
AUTH_LDAP_BIND_DN = get_setting('INVENTREE_LDAP_BIND_DN', 'ldap.bind_dn')
|
||||
AUTH_LDAP_BIND_PASSWORD = get_setting(
|
||||
"INVENTREE_LDAP_BIND_PASSWORD", "ldap.bind_password"
|
||||
'INVENTREE_LDAP_BIND_PASSWORD', 'ldap.bind_password'
|
||||
)
|
||||
AUTH_LDAP_USER_SEARCH = LDAPSearch(
|
||||
get_setting("INVENTREE_LDAP_SEARCH_BASE_DN", "ldap.search_base_dn"),
|
||||
get_setting('INVENTREE_LDAP_SEARCH_BASE_DN', 'ldap.search_base_dn'),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
str(
|
||||
get_setting(
|
||||
"INVENTREE_LDAP_SEARCH_FILTER_STR",
|
||||
"ldap.search_filter_str",
|
||||
"(uid= %(user)s)",
|
||||
'INVENTREE_LDAP_SEARCH_FILTER_STR',
|
||||
'ldap.search_filter_str',
|
||||
'(uid= %(user)s)',
|
||||
)
|
||||
),
|
||||
)
|
||||
AUTH_LDAP_USER_DN_TEMPLATE = get_setting(
|
||||
"INVENTREE_LDAP_USER_DN_TEMPLATE", "ldap.user_dn_template"
|
||||
'INVENTREE_LDAP_USER_DN_TEMPLATE', 'ldap.user_dn_template'
|
||||
)
|
||||
AUTH_LDAP_USER_ATTR_MAP = get_setting(
|
||||
"INVENTREE_LDAP_USER_ATTR_MAP",
|
||||
"ldap.user_attr_map",
|
||||
'INVENTREE_LDAP_USER_ATTR_MAP',
|
||||
'ldap.user_attr_map',
|
||||
{'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'},
|
||||
dict,
|
||||
)
|
||||
AUTH_LDAP_ALWAYS_UPDATE_USER = get_boolean_setting(
|
||||
"INVENTREE_LDAP_ALWAYS_UPDATE_USER", "ldap.always_update_user", True
|
||||
'INVENTREE_LDAP_ALWAYS_UPDATE_USER', 'ldap.always_update_user', True
|
||||
)
|
||||
AUTH_LDAP_CACHE_TIMEOUT = get_setting(
|
||||
"INVENTREE_LDAP_CACHE_TIMEOUT", "ldap.cache_timeout", 3600, int
|
||||
'INVENTREE_LDAP_CACHE_TIMEOUT', 'ldap.cache_timeout', 3600, int
|
||||
)
|
||||
|
||||
AUTH_LDAP_GROUP_SEARCH = LDAPSearch(
|
||||
get_setting("INVENTREE_LDAP_GROUP_SEARCH", "ldap.group_search"),
|
||||
get_setting('INVENTREE_LDAP_GROUP_SEARCH', 'ldap.group_search'),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
"(objectClass=groupOfUniqueNames)",
|
||||
'(objectClass=groupOfUniqueNames)',
|
||||
)
|
||||
AUTH_LDAP_GROUP_TYPE = GroupOfUniqueNamesType(name_attr="cn")
|
||||
AUTH_LDAP_GROUP_TYPE = GroupOfUniqueNamesType(name_attr='cn')
|
||||
AUTH_LDAP_REQUIRE_GROUP = get_setting(
|
||||
"INVENTREE_LDAP_REQUIRE_GROUP", "ldap.require_group"
|
||||
'INVENTREE_LDAP_REQUIRE_GROUP', 'ldap.require_group'
|
||||
)
|
||||
AUTH_LDAP_DENY_GROUP = get_setting("INVENTREE_LDAP_DENY_GROUP", "ldap.deny_group")
|
||||
AUTH_LDAP_DENY_GROUP = get_setting('INVENTREE_LDAP_DENY_GROUP', 'ldap.deny_group')
|
||||
AUTH_LDAP_USER_FLAGS_BY_GROUP = get_setting(
|
||||
"INVENTREE_LDAP_USER_FLAGS_BY_GROUP", "ldap.user_flags_by_group", {}, dict
|
||||
'INVENTREE_LDAP_USER_FLAGS_BY_GROUP', 'ldap.user_flags_by_group', {}, dict
|
||||
)
|
||||
AUTH_LDAP_FIND_GROUP_PERMS = True
|
||||
|
||||
@ -383,7 +383,7 @@ DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting(
|
||||
|
||||
# If the debug toolbar is enabled, add the modules
|
||||
if DEBUG_TOOLBAR_ENABLED: # pragma: no cover
|
||||
logger.info("Running with DEBUG_TOOLBAR enabled")
|
||||
logger.info('Running with DEBUG_TOOLBAR enabled')
|
||||
INSTALLED_APPS.append('debug_toolbar')
|
||||
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
|
||||
|
||||
@ -401,9 +401,9 @@ DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)
|
||||
if DOCKER: # pragma: no cover
|
||||
# Internal IP addresses are different when running under docker
|
||||
hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname())
|
||||
INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [
|
||||
"127.0.0.1",
|
||||
"10.0.2.2",
|
||||
INTERNAL_IPS = [ip[: ip.rfind('.')] + '.1' for ip in ips] + [
|
||||
'127.0.0.1',
|
||||
'10.0.2.2',
|
||||
]
|
||||
|
||||
# Allow secure http developer server in debug mode
|
||||
@ -521,7 +521,7 @@ Configure the database backend based on the user-specified values.
|
||||
- The following code lets the user "mix and match" database configuration
|
||||
"""
|
||||
|
||||
logger.debug("Configuring database backend:")
|
||||
logger.debug('Configuring database backend:')
|
||||
|
||||
# Extract database configuration from the config.yaml file
|
||||
db_config = CONFIG.get('database', {})
|
||||
@ -535,7 +535,7 @@ db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']
|
||||
|
||||
for key in db_keys:
|
||||
# First, check the environment variables
|
||||
env_key = f"INVENTREE_DB_{key}"
|
||||
env_key = f'INVENTREE_DB_{key}'
|
||||
env_var = os.environ.get(env_key, None)
|
||||
|
||||
if env_var:
|
||||
@ -544,7 +544,7 @@ for key in db_keys:
|
||||
try:
|
||||
env_var = int(env_var)
|
||||
except ValueError:
|
||||
logger.exception("Invalid number for %s: %s", env_key, env_var)
|
||||
logger.exception('Invalid number for %s: %s', env_key, env_var)
|
||||
# Override configuration value
|
||||
db_config[key] = env_var
|
||||
|
||||
@ -585,9 +585,9 @@ if 'sqlite' in db_engine:
|
||||
db_name = str(Path(db_name).resolve())
|
||||
db_config['NAME'] = db_name
|
||||
|
||||
logger.info("DB_ENGINE: %s", db_engine)
|
||||
logger.info("DB_NAME: %s", db_name)
|
||||
logger.info("DB_HOST: %s", db_host)
|
||||
logger.info('DB_ENGINE: %s', db_engine)
|
||||
logger.info('DB_NAME: %s', db_name)
|
||||
logger.info('DB_HOST: %s', db_host)
|
||||
|
||||
"""
|
||||
In addition to base-level database configuration, we may wish to specify specific options to the database backend
|
||||
@ -600,21 +600,21 @@ Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
|
||||
# connecting to the database server (such as a replica failover) don't sit and
|
||||
# wait for possibly an hour or more, just tell the client something went wrong
|
||||
# and let the client retry when they want to.
|
||||
db_options = db_config.get("OPTIONS", db_config.get("options", {}))
|
||||
db_options = db_config.get('OPTIONS', db_config.get('options', {}))
|
||||
|
||||
# Specific options for postgres backend
|
||||
if "postgres" in db_engine: # pragma: no cover
|
||||
if 'postgres' in db_engine: # pragma: no cover
|
||||
from psycopg2.extensions import (
|
||||
ISOLATION_LEVEL_READ_COMMITTED,
|
||||
ISOLATION_LEVEL_SERIALIZABLE,
|
||||
)
|
||||
|
||||
# Connection timeout
|
||||
if "connect_timeout" not in db_options:
|
||||
if 'connect_timeout' not in db_options:
|
||||
# The DB server is in the same data center, it should not take very
|
||||
# long to connect to the database server
|
||||
# # seconds, 2 is minimum allowed by libpq
|
||||
db_options["connect_timeout"] = int(
|
||||
db_options['connect_timeout'] = int(
|
||||
get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2)
|
||||
)
|
||||
|
||||
@ -624,36 +624,36 @@ if "postgres" in db_engine: # pragma: no cover
|
||||
# issue to resolve itself. It it that doesn't happen whatever happened
|
||||
# is probably fatal and no amount of waiting is going to fix it.
|
||||
# # 0 - TCP Keepalives disabled; 1 - enabled
|
||||
if "keepalives" not in db_options:
|
||||
db_options["keepalives"] = int(
|
||||
if 'keepalives' not in db_options:
|
||||
db_options['keepalives'] = int(
|
||||
get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1)
|
||||
)
|
||||
|
||||
# Seconds after connection is idle to send keep alive
|
||||
if "keepalives_idle" not in db_options:
|
||||
db_options["keepalives_idle"] = int(
|
||||
if 'keepalives_idle' not in db_options:
|
||||
db_options['keepalives_idle'] = int(
|
||||
get_setting(
|
||||
'INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1
|
||||
)
|
||||
)
|
||||
|
||||
# Seconds after missing ACK to send another keep alive
|
||||
if "keepalives_interval" not in db_options:
|
||||
db_options["keepalives_interval"] = int(
|
||||
if 'keepalives_interval' not in db_options:
|
||||
db_options['keepalives_interval'] = int(
|
||||
get_setting(
|
||||
"INVENTREE_DB_TCP_KEEPALIVES_INTERVAL",
|
||||
"database.tcp_keepalives_internal",
|
||||
"1",
|
||||
'INVENTREE_DB_TCP_KEEPALIVES_INTERVAL',
|
||||
'database.tcp_keepalives_internal',
|
||||
'1',
|
||||
)
|
||||
)
|
||||
|
||||
# Number of missing ACKs before we close the connection
|
||||
if "keepalives_count" not in db_options:
|
||||
db_options["keepalives_count"] = int(
|
||||
if 'keepalives_count' not in db_options:
|
||||
db_options['keepalives_count'] = int(
|
||||
get_setting(
|
||||
"INVENTREE_DB_TCP_KEEPALIVES_COUNT",
|
||||
"database.tcp_keepalives_count",
|
||||
"5",
|
||||
'INVENTREE_DB_TCP_KEEPALIVES_COUNT',
|
||||
'database.tcp_keepalives_count',
|
||||
'5',
|
||||
)
|
||||
)
|
||||
|
||||
@ -668,18 +668,18 @@ if "postgres" in db_engine: # pragma: no cover
|
||||
# protect against simultaneous changes.
|
||||
# https://www.postgresql.org/docs/devel/transaction-iso.html
|
||||
# https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level
|
||||
if "isolation_level" not in db_options:
|
||||
if 'isolation_level' not in db_options:
|
||||
serializable = get_boolean_setting(
|
||||
'INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False
|
||||
)
|
||||
db_options["isolation_level"] = (
|
||||
db_options['isolation_level'] = (
|
||||
ISOLATION_LEVEL_SERIALIZABLE
|
||||
if serializable
|
||||
else ISOLATION_LEVEL_READ_COMMITTED
|
||||
)
|
||||
|
||||
# Specific options for MySql / MariaDB backend
|
||||
elif "mysql" in db_engine: # pragma: no cover
|
||||
elif 'mysql' in db_engine: # pragma: no cover
|
||||
# TODO TCP time outs and keepalives
|
||||
|
||||
# MariaDB's default isolation level is Repeatable Read which is
|
||||
@ -688,16 +688,16 @@ elif "mysql" in db_engine: # pragma: no cover
|
||||
# protect against siumltaneous changes.
|
||||
# https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level
|
||||
# https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level
|
||||
if "isolation_level" not in db_options:
|
||||
if 'isolation_level' not in db_options:
|
||||
serializable = get_boolean_setting(
|
||||
'INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False
|
||||
)
|
||||
db_options["isolation_level"] = (
|
||||
"serializable" if serializable else "read committed"
|
||||
db_options['isolation_level'] = (
|
||||
'serializable' if serializable else 'read committed'
|
||||
)
|
||||
|
||||
# Specific options for sqlite backend
|
||||
elif "sqlite" in db_engine:
|
||||
elif 'sqlite' in db_engine:
|
||||
# TODO: Verify timeouts are not an issue because no network is involved for SQLite
|
||||
|
||||
# SQLite's default isolation level is Serializable due to SQLite's
|
||||
@ -756,30 +756,30 @@ if cache_host: # pragma: no cover
|
||||
# so don't wait too long for the cache as nothing in the cache should be
|
||||
# irreplaceable.
|
||||
_cache_options = {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
"SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")),
|
||||
"SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")),
|
||||
"CONNECTION_POOL_KWARGS": {
|
||||
"socket_keepalive": config.is_true(os.getenv("CACHE_TCP_KEEPALIVE", "1")),
|
||||
"socket_keepalive_options": {
|
||||
socket.TCP_KEEPCNT: int(os.getenv("CACHE_KEEPALIVES_COUNT", "5")),
|
||||
socket.TCP_KEEPIDLE: int(os.getenv("CACHE_KEEPALIVES_IDLE", "1")),
|
||||
socket.TCP_KEEPINTVL: int(os.getenv("CACHE_KEEPALIVES_INTERVAL", "1")),
|
||||
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
|
||||
'SOCKET_CONNECT_TIMEOUT': int(os.getenv('CACHE_CONNECT_TIMEOUT', '2')),
|
||||
'SOCKET_TIMEOUT': int(os.getenv('CACHE_SOCKET_TIMEOUT', '2')),
|
||||
'CONNECTION_POOL_KWARGS': {
|
||||
'socket_keepalive': config.is_true(os.getenv('CACHE_TCP_KEEPALIVE', '1')),
|
||||
'socket_keepalive_options': {
|
||||
socket.TCP_KEEPCNT: int(os.getenv('CACHE_KEEPALIVES_COUNT', '5')),
|
||||
socket.TCP_KEEPIDLE: int(os.getenv('CACHE_KEEPALIVES_IDLE', '1')),
|
||||
socket.TCP_KEEPINTVL: int(os.getenv('CACHE_KEEPALIVES_INTERVAL', '1')),
|
||||
socket.TCP_USER_TIMEOUT: int(
|
||||
os.getenv("CACHE_TCP_USER_TIMEOUT", "1000")
|
||||
os.getenv('CACHE_TCP_USER_TIMEOUT', '1000')
|
||||
),
|
||||
},
|
||||
},
|
||||
}
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": f"redis://{cache_host}:{cache_port}/0",
|
||||
"OPTIONS": _cache_options,
|
||||
'default': {
|
||||
'BACKEND': 'django_redis.cache.RedisCache',
|
||||
'LOCATION': f'redis://{cache_host}:{cache_port}/0',
|
||||
'OPTIONS': _cache_options,
|
||||
}
|
||||
}
|
||||
else:
|
||||
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
|
||||
CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}}
|
||||
|
||||
_q_worker_timeout = int(
|
||||
get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)
|
||||
@ -813,7 +813,7 @@ if SENTRY_ENABLED and SENTRY_DSN:
|
||||
if cache_host: # pragma: no cover
|
||||
# If using external redis cache, make the cache the broker for Django Q
|
||||
# as well
|
||||
Q_CLUSTER["django_redis"] = "worker"
|
||||
Q_CLUSTER['django_redis'] = 'worker'
|
||||
|
||||
# database user sessions
|
||||
SESSION_ENGINE = 'user_sessions.backends.db'
|
||||
@ -840,7 +840,7 @@ AUTH_PASSWORD_VALIDATORS = [
|
||||
EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', [])
|
||||
|
||||
if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover
|
||||
logger.warning("extra_url_schemes not correctly formatted")
|
||||
logger.warning('extra_url_schemes not correctly formatted')
|
||||
EXTRA_URL_SCHEMES = []
|
||||
|
||||
# Internationalization
|
||||
@ -912,7 +912,7 @@ CURRENCIES = get_setting(
|
||||
|
||||
# Ensure that at least one currency value is available
|
||||
if len(CURRENCIES) == 0: # pragma: no cover
|
||||
logger.warning("No currencies selected: Defaulting to USD")
|
||||
logger.warning('No currencies selected: Defaulting to USD')
|
||||
CURRENCIES = ['USD']
|
||||
|
||||
# Maximum number of decimal places for currency rendering
|
||||
@ -965,7 +965,7 @@ USE_L10N = True
|
||||
if not TESTING:
|
||||
USE_TZ = True # pragma: no cover
|
||||
|
||||
DATE_INPUT_FORMATS = ["%Y-%m-%d"]
|
||||
DATE_INPUT_FORMATS = ['%Y-%m-%d']
|
||||
|
||||
# crispy forms use the bootstrap templates
|
||||
CRISPY_TEMPLATE_PACK = 'bootstrap4'
|
||||
@ -1090,7 +1090,7 @@ PLUGIN_FILE_CHECKED = False # Was the plugin file checked?
|
||||
SITE_URL = get_setting('INVENTREE_SITE_URL', 'site_url', None)
|
||||
|
||||
if SITE_URL:
|
||||
logger.info("Site URL: %s", SITE_URL)
|
||||
logger.info('Site URL: %s', SITE_URL)
|
||||
|
||||
# Check that the site URL is valid
|
||||
validator = URLValidator()
|
||||
@ -1111,7 +1111,7 @@ FRONTEND_SETTINGS = config.get_frontend_settings(debug=DEBUG)
|
||||
FRONTEND_URL_BASE = FRONTEND_SETTINGS.get('base_url', 'platform')
|
||||
|
||||
if DEBUG:
|
||||
logger.info("InvenTree running with DEBUG enabled")
|
||||
logger.info('InvenTree running with DEBUG enabled')
|
||||
|
||||
logger.info("MEDIA_ROOT: '%s'", MEDIA_ROOT)
|
||||
logger.info("STATIC_ROOT: '%s'", STATIC_ROOT)
|
||||
@ -1131,12 +1131,12 @@ FLAGS = {
|
||||
CUSTOM_FLAGS = get_setting('INVENTREE_FLAGS', 'flags', None, typecast=dict)
|
||||
if CUSTOM_FLAGS:
|
||||
if not isinstance(CUSTOM_FLAGS, dict):
|
||||
logger.error("Invalid custom flags, must be valid dict: %s", str(CUSTOM_FLAGS))
|
||||
logger.error('Invalid custom flags, must be valid dict: %s', str(CUSTOM_FLAGS))
|
||||
else:
|
||||
logger.info("Custom flags: %s", str(CUSTOM_FLAGS))
|
||||
logger.info('Custom flags: %s', str(CUSTOM_FLAGS))
|
||||
FLAGS.update(CUSTOM_FLAGS)
|
||||
|
||||
# Magic login django-sesame
SESAME_MAX_AGE = 300
# LOGIN_REDIRECT_URL = f"/{FRONTEND_URL_BASE}/logged-in/"
LOGIN_REDIRECT_URL = "/index/"
LOGIN_REDIRECT_URL = '/index/'

@ -74,9 +74,9 @@ provider_urlpatterns = []
|
||||
|
||||
for name, provider in providers.registry.provider_map.items():
|
||||
try:
|
||||
prov_mod = import_module(provider.get_package() + ".views")
|
||||
prov_mod = import_module(provider.get_package() + '.views')
|
||||
except ImportError:
|
||||
logger.exception("Could not import authentication provider %s", name)
|
||||
logger.exception('Could not import authentication provider %s', name)
|
||||
continue
|
||||
|
||||
# Try to extract the adapter class
|
||||
|
@ -48,7 +48,7 @@ def check_provider(provider, raise_error=False):
|
||||
if allauth.app_settings.SITES_ENABLED:
|
||||
# At least one matching site must be specified
|
||||
if not app.sites.exists():
|
||||
logger.error("SocialApp %s has no sites configured", app)
|
||||
logger.error('SocialApp %s has no sites configured', app)
|
||||
return False
|
||||
|
||||
# At this point, we assume that the provider is correctly configured
|
||||
|
@ -13,7 +13,7 @@ from django_q.status import Stat
|
||||
import InvenTree.email
|
||||
import InvenTree.ready
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def is_worker_running(**kwargs):
|
||||
@ -63,13 +63,13 @@ def check_system_health(**kwargs):
|
||||
|
||||
if not is_worker_running(**kwargs): # pragma: no cover
|
||||
result = False
|
||||
logger.warning(_("Background worker check failed"))
|
||||
logger.warning(_('Background worker check failed'))
|
||||
|
||||
if not InvenTree.email.is_email_configured(): # pragma: no cover
|
||||
result = False
|
||||
logger.warning(_("Email backend not configured"))
|
||||
logger.warning(_('Email backend not configured'))
|
||||
|
||||
if not result: # pragma: no cover
|
||||
logger.warning(_("InvenTree system health checks failed"))
|
||||
logger.warning(_('InvenTree system health checks failed'))
|
||||
|
||||
return result
|
||||
|
@ -9,12 +9,12 @@ class PurchaseOrderStatus(StatusCode):
"""Defines a set of status codes for a PurchaseOrder."""

# Order status codes
PENDING = 10, _("Pending"), 'secondary' # Order is pending (not yet placed)
PLACED = 20, _("Placed"), 'primary' # Order has been placed with supplier
COMPLETE = 30, _("Complete"), 'success' # Order has been completed
CANCELLED = 40, _("Cancelled"), 'danger' # Order was cancelled
LOST = 50, _("Lost"), 'warning' # Order was lost
RETURNED = 60, _("Returned"), 'warning' # Order was returned
PENDING = 10, _('Pending'), 'secondary' # Order is pending (not yet placed)
PLACED = 20, _('Placed'), 'primary' # Order has been placed with supplier
COMPLETE = 30, _('Complete'), 'success' # Order has been completed
CANCELLED = 40, _('Cancelled'), 'danger' # Order was cancelled
LOST = 50, _('Lost'), 'warning' # Order was lost
RETURNED = 60, _('Returned'), 'warning' # Order was returned


class PurchaseOrderStatusGroups:
@ -34,16 +34,16 @@ class PurchaseOrderStatusGroups:
|
||||
class SalesOrderStatus(StatusCode):
|
||||
"""Defines a set of status codes for a SalesOrder."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary' # Order is pending
|
||||
PENDING = 10, _('Pending'), 'secondary' # Order is pending
|
||||
IN_PROGRESS = (
|
||||
15,
|
||||
_("In Progress"),
|
||||
_('In Progress'),
|
||||
'primary',
|
||||
) # Order has been issued, and is in progress
|
||||
SHIPPED = 20, _("Shipped"), 'success' # Order has been shipped to customer
|
||||
CANCELLED = 40, _("Cancelled"), 'danger' # Order has been cancelled
|
||||
LOST = 50, _("Lost"), 'warning' # Order was lost
|
||||
RETURNED = 60, _("Returned"), 'warning' # Order was returned
|
||||
SHIPPED = 20, _('Shipped'), 'success' # Order has been shipped to customer
|
||||
CANCELLED = 40, _('Cancelled'), 'danger' # Order has been cancelled
|
||||
LOST = 50, _('Lost'), 'warning' # Order was lost
|
||||
RETURNED = 60, _('Returned'), 'warning' # Order was returned
|
||||
|
||||
|
||||
class SalesOrderStatusGroups:
|
||||
@ -59,18 +59,18 @@ class SalesOrderStatusGroups:
|
||||
class StockStatus(StatusCode):
|
||||
"""Status codes for Stock."""
|
||||
|
||||
OK = 10, _("OK"), 'success' # Item is OK
|
||||
ATTENTION = 50, _("Attention needed"), 'warning' # Item requires attention
|
||||
DAMAGED = 55, _("Damaged"), 'warning' # Item is damaged
|
||||
DESTROYED = 60, _("Destroyed"), 'danger' # Item is destroyed
|
||||
REJECTED = 65, _("Rejected"), 'danger' # Item is rejected
|
||||
LOST = 70, _("Lost"), 'dark' # Item has been lost
|
||||
OK = 10, _('OK'), 'success' # Item is OK
|
||||
ATTENTION = 50, _('Attention needed'), 'warning' # Item requires attention
|
||||
DAMAGED = 55, _('Damaged'), 'warning' # Item is damaged
|
||||
DESTROYED = 60, _('Destroyed'), 'danger' # Item is destroyed
|
||||
REJECTED = 65, _('Rejected'), 'danger' # Item is rejected
|
||||
LOST = 70, _('Lost'), 'dark' # Item has been lost
|
||||
QUARANTINED = (
|
||||
75,
|
||||
_("Quarantined"),
|
||||
_('Quarantined'),
|
||||
'info',
|
||||
) # Item has been quarantined and is unavailable
|
||||
RETURNED = 85, _("Returned"), 'warning' # Item has been returned from a customer
|
||||
RETURNED = 85, _('Returned'), 'warning' # Item has been returned from a customer
|
||||
|
||||
|
||||
class StockStatusGroups:
|
||||
@ -129,7 +129,7 @@ class StockHistoryCode(StatusCode):
|
||||
BUILD_CONSUMED = 57, _('Consumed by build order')
|
||||
|
||||
# Sales order codes
|
||||
SHIPPED_AGAINST_SALES_ORDER = 60, _("Shipped against Sales Order")
|
||||
SHIPPED_AGAINST_SALES_ORDER = 60, _('Shipped against Sales Order')
|
||||
|
||||
# Purchase order codes
|
||||
RECEIVED_AGAINST_PURCHASE_ORDER = 70, _('Received against Purchase Order')
|
||||
@ -145,10 +145,10 @@ class StockHistoryCode(StatusCode):
|
||||
class BuildStatus(StatusCode):
|
||||
"""Build status codes."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary' # Build is pending / active
|
||||
PRODUCTION = 20, _("Production"), 'primary' # BuildOrder is in production
|
||||
CANCELLED = 30, _("Cancelled"), 'danger' # Build was cancelled
|
||||
COMPLETE = 40, _("Complete"), 'success' # Build is complete
|
||||
PENDING = 10, _('Pending'), 'secondary' # Build is pending / active
|
||||
PRODUCTION = 20, _('Production'), 'primary' # BuildOrder is in production
|
||||
CANCELLED = 30, _('Cancelled'), 'danger' # Build was cancelled
|
||||
COMPLETE = 40, _('Complete'), 'success' # Build is complete
|
||||
|
||||
|
||||
class BuildStatusGroups:
|
||||
@ -161,13 +161,13 @@ class ReturnOrderStatus(StatusCode):
|
||||
"""Defines a set of status codes for a ReturnOrder"""
|
||||
|
||||
# Order is pending, waiting for receipt of items
|
||||
PENDING = 10, _("Pending"), 'secondary'
|
||||
PENDING = 10, _('Pending'), 'secondary'
|
||||
|
||||
# Items have been received, and are being inspected
|
||||
IN_PROGRESS = 20, _("In Progress"), 'primary'
|
||||
IN_PROGRESS = 20, _('In Progress'), 'primary'
|
||||
|
||||
COMPLETE = 30, _("Complete"), 'success'
|
||||
CANCELLED = 40, _("Cancelled"), 'danger'
|
||||
COMPLETE = 30, _('Complete'), 'success'
|
||||
CANCELLED = 40, _('Cancelled'), 'danger'
|
||||
|
||||
|
||||
class ReturnOrderStatusGroups:
|
||||
@ -179,19 +179,19 @@ class ReturnOrderStatusGroups:
|
||||
class ReturnOrderLineStatus(StatusCode):
|
||||
"""Defines a set of status codes for a ReturnOrderLineItem"""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary'
|
||||
PENDING = 10, _('Pending'), 'secondary'
|
||||
|
||||
# Item is to be returned to customer, no other action
|
||||
RETURN = 20, _("Return"), 'success'
|
||||
RETURN = 20, _('Return'), 'success'
|
||||
|
||||
# Item is to be repaired, and returned to customer
|
||||
REPAIR = 30, _("Repair"), 'primary'
|
||||
REPAIR = 30, _('Repair'), 'primary'
|
||||
|
||||
# Item is to be replaced (new item shipped)
|
||||
REPLACE = 40, _("Replace"), 'warning'
|
||||
REPLACE = 40, _('Replace'), 'warning'
|
||||
|
||||
# Item is to be refunded (cannot be repaired)
|
||||
REFUND = 50, _("Refund"), 'info'
|
||||
REFUND = 50, _('Refund'), 'info'
|
||||
|
||||
# Item is rejected
|
||||
REJECT = 60, _("Reject"), 'danger'
|
||||
REJECT = 60, _('Reject'), 'danger'
|
||||
|
@ -31,7 +31,7 @@ from plugin import registry
|
||||
|
||||
from .version import isInvenTreeUpToDate
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def schedule_task(taskname, **kwargs):
|
||||
@ -46,7 +46,7 @@ def schedule_task(taskname, **kwargs):
|
||||
try:
|
||||
from django_q.models import Schedule
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.info("Could not start background tasks - App registry not ready")
|
||||
logger.info('Could not start background tasks - App registry not ready')
|
||||
return
|
||||
|
||||
try:
|
||||
@ -278,13 +278,13 @@ class ScheduledTask:
interval: str
minutes: int = None

MINUTES = "I"
HOURLY = "H"
DAILY = "D"
WEEKLY = "W"
MONTHLY = "M"
QUARTERLY = "Q"
YEARLY = "Y"
MINUTES = 'I'
HOURLY = 'H'
DAILY = 'D'
WEEKLY = 'W'
MONTHLY = 'M'
QUARTERLY = 'Q'
YEARLY = 'Y'
TYPE = [MINUTES, HOURLY, DAILY, WEEKLY, MONTHLY, QUARTERLY, YEARLY]


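For context, these single-character codes are the intervals consumed by the scheduled_task decorator used further down in this file (e.g. @scheduled_task(ScheduledTask.DAILY)); a minimal usage sketch with a made-up task body:

@scheduled_task(ScheduledTask.DAILY)
def prune_old_records():
    # hypothetical task: the body here is illustrative only
    logger.info('Pruning old records...')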
@ -349,7 +349,7 @@ def heartbeat():
|
||||
try:
|
||||
from django_q.models import Success
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.info("Could not perform heartbeat task - App registry not ready")
|
||||
logger.info('Could not perform heartbeat task - App registry not ready')
|
||||
return
|
||||
|
||||
threshold = timezone.now() - timedelta(minutes=30)
|
||||
@ -378,7 +378,7 @@ def delete_successful_tasks():
|
||||
results = Success.objects.filter(started__lte=threshold)
|
||||
|
||||
if results.count() > 0:
|
||||
logger.info("Deleting %s successful task records", results.count())
|
||||
logger.info('Deleting %s successful task records', results.count())
|
||||
results.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
@ -402,7 +402,7 @@ def delete_failed_tasks():
|
||||
results = Failure.objects.filter(started__lte=threshold)
|
||||
|
||||
if results.count() > 0:
|
||||
logger.info("Deleting %s failed task records", results.count())
|
||||
logger.info('Deleting %s failed task records', results.count())
|
||||
results.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
@ -423,7 +423,7 @@ def delete_old_error_logs():
|
||||
errors = Error.objects.filter(when__lte=threshold)
|
||||
|
||||
if errors.count() > 0:
|
||||
logger.info("Deleting %s old error logs", errors.count())
|
||||
logger.info('Deleting %s old error logs', errors.count())
|
||||
errors.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
@ -449,13 +449,13 @@ def delete_old_notifications():
|
||||
items = NotificationEntry.objects.filter(updated__lte=threshold)
|
||||
|
||||
if items.count() > 0:
|
||||
logger.info("Deleted %s old notification entries", items.count())
|
||||
logger.info('Deleted %s old notification entries', items.count())
|
||||
items.delete()
|
||||
|
||||
items = NotificationMessage.objects.filter(creation__lte=threshold)
|
||||
|
||||
if items.count() > 0:
|
||||
logger.info("Deleted %s old notification messages", items.count())
|
||||
logger.info('Deleted %s old notification messages', items.count())
|
||||
items.delete()
|
||||
|
||||
except AppRegistryNotReady:
|
||||
@ -485,7 +485,7 @@ def check_for_updates():
|
||||
if not check_daily_holdoff('check_for_updates', interval):
|
||||
return
|
||||
|
||||
logger.info("Checking for InvenTree software updates")
|
||||
logger.info('Checking for InvenTree software updates')
|
||||
|
||||
headers = {}
|
||||
|
||||
@ -494,7 +494,7 @@ def check_for_updates():
|
||||
token = os.getenv('GITHUB_TOKEN', None)
|
||||
|
||||
if token:
|
||||
headers['Authorization'] = f"Bearer {token}"
|
||||
headers['Authorization'] = f'Bearer {token}'
|
||||
|
||||
response = requests.get(
|
||||
'https://api.github.com/repos/inventree/inventree/releases/latest',
|
||||
@ -513,7 +513,7 @@ def check_for_updates():
|
||||
if not tag:
|
||||
raise ValueError("'tag_name' missing from GitHub response") # pragma: no cover
|
||||
|
||||
match = re.match(r"^.*(\d+)\.(\d+)\.(\d+).*$", tag)
|
||||
match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', tag)
|
||||
|
||||
if len(match.groups()) != 3: # pragma: no cover
|
||||
logger.warning("Version '%s' did not match expected pattern", tag)
|
||||
@ -534,15 +534,15 @@ def check_for_updates():
|
||||
|
||||
# Send notification if there is a new version
|
||||
if not isInvenTreeUpToDate():
|
||||
logger.warning("InvenTree is not up-to-date, sending notification")
|
||||
logger.warning('InvenTree is not up-to-date, sending notification')
|
||||
|
||||
plg = registry.get_plugin('InvenTreeCoreNotificationsPlugin')
|
||||
if not plg:
|
||||
logger.warning("Cannot send notification - plugin not found")
|
||||
logger.warning('Cannot send notification - plugin not found')
|
||||
return
|
||||
plg = plg.plugin_config()
|
||||
if not plg:
|
||||
logger.warning("Cannot send notification - plugin config not found")
|
||||
logger.warning('Cannot send notification - plugin config not found')
|
||||
return
|
||||
# Send notification
|
||||
trigger_superuser_notification(
|
||||
@ -579,7 +579,7 @@ def update_exchange_rates(force: bool = False):
|
||||
)
|
||||
|
||||
if not check_daily_holdoff('update_exchange_rates', interval):
|
||||
logger.info("Skipping exchange rate update (interval not reached)")
|
||||
logger.info('Skipping exchange rate update (interval not reached)')
|
||||
return
|
||||
|
||||
backend = InvenTreeExchange()
|
||||
@ -590,7 +590,7 @@ def update_exchange_rates(force: bool = False):
|
||||
backend.update_rates(base_currency=base)
|
||||
|
||||
# Remove any exchange rates which are not in the provided currencies
|
||||
Rate.objects.filter(backend="InvenTreeExchange").exclude(
|
||||
Rate.objects.filter(backend='InvenTreeExchange').exclude(
|
||||
currency__in=currency_codes()
|
||||
).delete()
|
||||
|
||||
@ -598,9 +598,9 @@ def update_exchange_rates(force: bool = False):
|
||||
record_task_success('update_exchange_rates')
|
||||
|
||||
except (AppRegistryNotReady, OperationalError, ProgrammingError):
|
||||
logger.warning("Could not update exchange rates - database not ready")
|
||||
logger.warning('Could not update exchange rates - database not ready')
|
||||
except Exception as e: # pragma: no cover
|
||||
logger.exception("Error updating exchange rates: %s", str(type(e)))
|
||||
logger.exception('Error updating exchange rates: %s', str(type(e)))
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
@ -620,11 +620,11 @@ def run_backup():
|
||||
if not check_daily_holdoff('run_backup', interval):
|
||||
return
|
||||
|
||||
logger.info("Performing automated database backup task")
|
||||
logger.info('Performing automated database backup task')
|
||||
|
||||
call_command("dbbackup", noinput=True, clean=True, compress=True, interactive=False)
|
||||
call_command('dbbackup', noinput=True, clean=True, compress=True, interactive=False)
|
||||
call_command(
|
||||
"mediabackup", noinput=True, clean=True, compress=True, interactive=False
|
||||
'mediabackup', noinput=True, clean=True, compress=True, interactive=False
|
||||
)
|
||||
|
||||
# Record that this task was successful
|
||||
@ -653,7 +653,7 @@ def check_for_migrations():
|
||||
logger.info('There are %s pending migrations', n)
|
||||
InvenTreeSetting.set_setting('_PENDING_MIGRATIONS', n, None)
|
||||
|
||||
logger.info("Checking for pending database migrations")
|
||||
logger.info('Checking for pending database migrations')
|
||||
|
||||
# Force plugin registry reload
|
||||
registry.check_reload()
|
||||
@ -671,12 +671,12 @@ def check_for_migrations():
|
||||
|
||||
# Test if auto-updates are enabled
|
||||
if not get_setting('INVENTREE_AUTO_UPDATE', 'auto_update'):
|
||||
logger.info("Auto-update is disabled - skipping migrations")
|
||||
logger.info('Auto-update is disabled - skipping migrations')
|
||||
return
|
||||
|
||||
# Log open migrations
|
||||
for migration in plan:
|
||||
logger.info("- %s", str(migration[0]))
|
||||
logger.info('- %s', str(migration[0]))
|
||||
|
||||
# Set the application to maintenance mode - no access from now on.
|
||||
set_maintenance_mode(True)
|
||||
@ -694,13 +694,13 @@ def check_for_migrations():
|
||||
else:
|
||||
set_pending_migrations(0)
|
||||
|
||||
logger.info("Completed %s migrations", n)
|
||||
logger.info('Completed %s migrations', n)
|
||||
|
||||
# Make sure we are out of maintenance mode
|
||||
if get_maintenance_mode():
|
||||
logger.warning("Maintenance mode was not disabled - forcing it now")
|
||||
logger.warning('Maintenance mode was not disabled - forcing it now')
|
||||
set_maintenance_mode(False)
|
||||
logger.info("Manually released maintenance mode")
|
||||
logger.info('Manually released maintenance mode')
|
||||
|
||||
# We should be current now - triggering full reload to make sure all models
|
||||
# are loaded fully in their new state.
|
||||
|
@ -69,11 +69,11 @@ class APITests(InvenTreeAPITestCase):
|
||||
"""Helper function to use basic auth."""
|
||||
# Use basic authentication
|
||||
|
||||
authstring = bytes("{u}:{p}".format(u=self.username, p=self.password), "ascii")
|
||||
authstring = bytes('{u}:{p}'.format(u=self.username, p=self.password), 'ascii')
|
||||
|
||||
# Use "basic" auth by default
|
||||
auth = b64encode(authstring).decode("ascii")
|
||||
self.client.credentials(HTTP_AUTHORIZATION="Basic {auth}".format(auth=auth))
|
||||
auth = b64encode(authstring).decode('ascii')
|
||||
self.client.credentials(HTTP_AUTHORIZATION='Basic {auth}'.format(auth=auth))
|
||||
|
||||
def tokenAuth(self):
|
||||
"""Helper function to use token auth."""
|
||||
@ -274,7 +274,7 @@ class BulkDeleteTests(InvenTreeAPITestCase):
|
||||
)
|
||||
|
||||
# DELETE with invalid 'items'
|
||||
response = self.delete(url, {'items': {"hello": "world"}}, expected_code=400)
|
||||
response = self.delete(url, {'items': {'hello': 'world'}}, expected_code=400)
|
||||
|
||||
self.assertIn("'items' must be supplied as a list object", str(response.data))
|
||||
|
||||
|
@ -67,7 +67,7 @@ class URLTest(TestCase):
|
||||
"""Search for all instances of {% url %} in supplied template file."""
|
||||
urls = []
|
||||
|
||||
pattern = "{% url ['\"]([^'\"]+)['\"]([^%]*)%}"
|
||||
pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'
|
||||
|
||||
with open(input_file, 'r') as f:
|
||||
data = f.read()
|
||||
@ -91,16 +91,16 @@ class URLTest(TestCase):
|
||||
pk = None
|
||||
|
||||
# TODO: Handle reverse lookup of admin URLs!
|
||||
if url.startswith("admin:"):
|
||||
if url.startswith('admin:'):
|
||||
return
|
||||
|
||||
# TODO can this be more elegant?
|
||||
if url.startswith("account_"):
|
||||
if url.startswith('account_'):
|
||||
return
|
||||
|
||||
if pk:
|
||||
# We will assume that there is at least one item in the database
|
||||
reverse(url, kwargs={"pk": 1})
|
||||
reverse(url, kwargs={'pk': 1})
|
||||
else:
|
||||
reverse(url)
|
||||
|
||||
@ -113,14 +113,14 @@ class URLTest(TestCase):
|
||||
|
||||
def test_html_templates(self):
|
||||
"""Test all HTML templates for broken url tags."""
|
||||
template_files = self.find_files("*.html")
|
||||
template_files = self.find_files('*.html')
|
||||
|
||||
for f in template_files:
|
||||
self.check_file(f)
|
||||
|
||||
def test_js_templates(self):
|
||||
"""Test all JS templates for broken url tags."""
|
||||
template_files = self.find_files("*.js")
|
||||
template_files = self.find_files('*.js')
|
||||
|
||||
for f in template_files:
|
||||
self.check_file(f)
|
||||
|
@ -23,13 +23,13 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
def test_index_redirect(self):
|
||||
"""Top-level URL should redirect to "index" page."""
|
||||
response = self.client.get("/")
|
||||
response = self.client.get('/')
|
||||
|
||||
self.assertEqual(response.status_code, 302)
|
||||
|
||||
def get_index_page(self):
|
||||
"""Retrieve the index page (used for subsequent unit tests)"""
|
||||
response = self.client.get("/index/")
|
||||
response = self.client.get('/index/')
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@ -68,8 +68,8 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
# Default user has staff access, so all panels will be present
|
||||
for panel in user_panels + staff_panels + plugin_panels:
|
||||
self.assertIn(f"select-{panel}", content)
|
||||
self.assertIn(f"panel-{panel}", content)
|
||||
self.assertIn(f'select-{panel}', content)
|
||||
self.assertIn(f'panel-{panel}', content)
|
||||
|
||||
# Now create a user who does not have staff access
|
||||
pleb_user = get_user_model().objects.create_user(
|
||||
@ -93,24 +93,24 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
# Normal user still has access to user-specific panels
|
||||
for panel in user_panels:
|
||||
self.assertIn(f"select-{panel}", content)
|
||||
self.assertIn(f"panel-{panel}", content)
|
||||
self.assertIn(f'select-{panel}', content)
|
||||
self.assertIn(f'panel-{panel}', content)
|
||||
|
||||
# Normal user does NOT have access to global or plugin settings
|
||||
for panel in staff_panels + plugin_panels:
|
||||
self.assertNotIn(f"select-{panel}", content)
|
||||
self.assertNotIn(f"panel-{panel}", content)
|
||||
self.assertNotIn(f'select-{panel}', content)
|
||||
self.assertNotIn(f'panel-{panel}', content)
|
||||
|
||||
def test_url_login(self):
|
||||
"""Test logging in via arguments"""
|
||||
# Log out
|
||||
self.client.logout()
|
||||
response = self.client.get("/index/")
|
||||
response = self.client.get('/index/')
|
||||
self.assertEqual(response.status_code, 302)
|
||||
|
||||
# Try login with url
|
||||
response = self.client.get(
|
||||
f"/accounts/login/?next=/&login={self.username}&password={self.password}"
|
||||
f'/accounts/login/?next=/&login={self.username}&password={self.password}'
|
||||
)
|
||||
self.assertEqual(response.status_code, 302)
|
||||
self.assertEqual(response.url, '/')
|
||||
|
@ -45,12 +45,12 @@ class ConversionTest(TestCase):
def test_prefixes(self):
"""Test inputs where prefixes are used"""
tests = {
"3": 3,
"3m": 3,
"3mm": 0.003,
"3k": 3000,
"3u": 0.000003,
"3 inch": 0.0762,
'3': 3,
'3m': 3,
'3mm': 0.003,
'3k': 3000,
'3u': 0.000003,
'3 inch': 0.0762,
}

for val, expected in tests.items():
@ -60,13 +60,13 @@ class ConversionTest(TestCase):
|
||||
def test_base_units(self):
|
||||
"""Test conversion to specified base units"""
|
||||
tests = {
|
||||
"3": 3,
|
||||
"3 dozen": 36,
|
||||
"50 dozen kW": 600000,
|
||||
"1 / 10": 0.1,
|
||||
"1/2 kW": 500,
|
||||
"1/2 dozen kW": 6000,
|
||||
"0.005 MW": 5000,
|
||||
'3': 3,
|
||||
'3 dozen': 36,
|
||||
'50 dozen kW': 600000,
|
||||
'1 / 10': 0.1,
|
||||
'1/2 kW': 500,
|
||||
'1/2 dozen kW': 6000,
|
||||
'0.005 MW': 5000,
|
||||
}
|
||||
|
||||
for val, expected in tests.items():
|
||||
@ -173,24 +173,24 @@ class ValidatorTest(TestCase):
|
||||
|
||||
def test_overage(self):
|
||||
"""Test overage validator."""
|
||||
validate_overage("100%")
|
||||
validate_overage("10")
|
||||
validate_overage("45.2 %")
|
||||
validate_overage('100%')
|
||||
validate_overage('10')
|
||||
validate_overage('45.2 %')
|
||||
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
validate_overage("-1")
|
||||
validate_overage('-1')
|
||||
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
validate_overage("-2.04 %")
|
||||
validate_overage('-2.04 %')
|
||||
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
validate_overage("105%")
|
||||
validate_overage('105%')
|
||||
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
validate_overage("xxx %")
|
||||
validate_overage('xxx %')
|
||||
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
validate_overage("aaaa")
|
||||
validate_overage('aaaa')
|
||||
|
||||
def test_url_validation(self):
|
||||
"""Test for AllowedURLValidator"""
|
||||
@ -230,7 +230,7 @@ class FormatTest(TestCase):
|
||||
def test_parse(self):
|
||||
"""Tests for the 'parse_format_string' function"""
|
||||
# Extract data from a valid format string
|
||||
fmt = "PO-{abc:02f}-{ref:04d}-{date}-???"
|
||||
fmt = 'PO-{abc:02f}-{ref:04d}-{date}-???'
|
||||
|
||||
info = InvenTree.format.parse_format_string(fmt)
|
||||
|
||||
@ -246,10 +246,10 @@ class FormatTest(TestCase):
|
||||
def test_create_regex(self):
|
||||
"""Test function for creating a regex from a format string"""
|
||||
tests = {
|
||||
"PO-123-{ref:04f}": r"^PO\-123\-(?P<ref>.+)$",
|
||||
"{PO}-???-{ref}-{date}-22": r"^(?P<PO>.+)\-...\-(?P<ref>.+)\-(?P<date>.+)\-22$",
|
||||
"ABC-123-###-{ref}": r"^ABC\-123\-\d\d\d\-(?P<ref>.+)$",
|
||||
"ABC-123": r"^ABC\-123$",
|
||||
'PO-123-{ref:04f}': r'^PO\-123\-(?P<ref>.+)$',
|
||||
'{PO}-???-{ref}-{date}-22': r'^(?P<PO>.+)\-...\-(?P<ref>.+)\-(?P<date>.+)\-22$',
|
||||
'ABC-123-###-{ref}': r'^ABC\-123\-\d\d\d\-(?P<ref>.+)$',
|
||||
'ABC-123': r'^ABC\-123$',
|
||||
}
|
||||
|
||||
for fmt, reg in tests.items():
|
||||
@ -259,28 +259,28 @@ class FormatTest(TestCase):
|
||||
"""Test that string validation works as expected"""
|
||||
# These tests should pass
|
||||
for value, pattern in {
|
||||
"ABC-hello-123": "???-{q}-###",
|
||||
"BO-1234": "BO-{ref}",
|
||||
"111.222.fred.china": "???.###.{name}.{place}",
|
||||
"PO-1234": "PO-{ref:04d}",
|
||||
'ABC-hello-123': '???-{q}-###',
|
||||
'BO-1234': 'BO-{ref}',
|
||||
'111.222.fred.china': '???.###.{name}.{place}',
|
||||
'PO-1234': 'PO-{ref:04d}',
|
||||
}.items():
|
||||
self.assertTrue(InvenTree.format.validate_string(value, pattern))
|
||||
|
||||
# These tests should fail
|
||||
for value, pattern in {
|
||||
"ABC-hello-123": "###-{q}-???",
|
||||
"BO-1234": "BO.{ref}",
|
||||
"BO-####": "BO-{pattern}-{next}",
|
||||
"BO-123d": "BO-{ref:04d}",
|
||||
'ABC-hello-123': '###-{q}-???',
|
||||
'BO-1234': 'BO.{ref}',
|
||||
'BO-####': 'BO-{pattern}-{next}',
|
||||
'BO-123d': 'BO-{ref:04d}',
|
||||
}.items():
|
||||
self.assertFalse(InvenTree.format.validate_string(value, pattern))
|
||||
|
||||
def test_extract_value(self):
|
||||
"""Test that we can extract named values based on a format string"""
|
||||
# Simple tests based on a straight-forward format string
|
||||
fmt = "PO-###-{ref:04d}"
|
||||
fmt = 'PO-###-{ref:04d}'
|
||||
|
||||
tests = {"123": "PO-123-123", "456": "PO-123-456", "789": "PO-123-789"}
|
||||
tests = {'123': 'PO-123-123', '456': 'PO-123-456', '789': 'PO-123-789'}
|
||||
|
||||
for k, v in tests.items():
|
||||
self.assertEqual(InvenTree.format.extract_named_group('ref', v, fmt), k)
|
||||
@ -293,8 +293,8 @@ class FormatTest(TestCase):
|
||||
InvenTree.format.extract_named_group('ref', v, fmt)
|
||||
|
||||
# More complex tests
|
||||
fmt = "PO-{date}-{test}-???-{ref}-###"
|
||||
val = "PO-2022-02-01-hello-ABC-12345-222"
|
||||
fmt = 'PO-{date}-{test}-???-{ref}-###'
|
||||
val = 'PO-2022-02-01-hello-ABC-12345-222'
|
||||
|
||||
data = {'date': '2022-02-01', 'test': 'hello', 'ref': '12345'}
|
||||
|
||||
@ -305,42 +305,42 @@ class FormatTest(TestCase):
|
||||
|
||||
# Raises a ValueError as the format string is bad
|
||||
with self.assertRaises(ValueError):
|
||||
InvenTree.format.extract_named_group("test", "PO-1234-5", "PO-{test}-{")
|
||||
InvenTree.format.extract_named_group('test', 'PO-1234-5', 'PO-{test}-{')
|
||||
|
||||
# Raises a NameError as the named group does not exist in the format string
|
||||
with self.assertRaises(NameError):
|
||||
InvenTree.format.extract_named_group("missing", "PO-12345", "PO-{test}")
|
||||
InvenTree.format.extract_named_group('missing', 'PO-12345', 'PO-{test}')
|
||||
|
||||
# Raises a ValueError as the value does not match the format string
|
||||
with self.assertRaises(ValueError):
|
||||
InvenTree.format.extract_named_group("test", "PO-1234", "PO-{test}-1234")
|
||||
InvenTree.format.extract_named_group('test', 'PO-1234', 'PO-{test}-1234')
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
InvenTree.format.extract_named_group("test", "PO-ABC-xyz", "PO-###-{test}")
|
||||
InvenTree.format.extract_named_group('test', 'PO-ABC-xyz', 'PO-###-{test}')
|
||||
|
||||
def test_currency_formatting(self):
|
||||
"""Test that currency formatting works correctly for multiple currencies"""
|
||||
|
||||
test_data = (
|
||||
(Money(3651.285718, "USD"), 4, "$3,651.2857"), # noqa: E201,E202
|
||||
(Money(487587.849178, "CAD"), 5, "CA$487,587.84918"), # noqa: E201,E202
|
||||
(Money(0.348102, "EUR"), 1, "€0.3"), # noqa: E201,E202
|
||||
(Money(0.916530, "GBP"), 1, "£0.9"), # noqa: E201,E202
|
||||
(Money(61.031024, "JPY"), 3, "¥61.031"), # noqa: E201,E202
|
||||
(Money(49609.694602, "JPY"), 1, "¥49,609.7"), # noqa: E201,E202
|
||||
(Money(155565.264777, "AUD"), 2, "A$155,565.26"), # noqa: E201,E202
|
||||
(Money(0.820437, "CNY"), 4, "CN¥0.8204"), # noqa: E201,E202
|
||||
(Money(7587.849178, "EUR"), 0, "€7,588"), # noqa: E201,E202
|
||||
(Money(0.348102, "GBP"), 3, "£0.348"), # noqa: E201,E202
|
||||
(Money(0.652923, "CHF"), 0, "CHF1"), # noqa: E201,E202
|
||||
(Money(0.820437, "CNY"), 1, "CN¥0.8"), # noqa: E201,E202
|
||||
(Money(98789.5295680, "CHF"), 0, "CHF98,790"), # noqa: E201,E202
|
||||
(Money(0.585787, "USD"), 1, "$0.6"), # noqa: E201,E202
|
||||
(Money(0.690541, "CAD"), 3, "CA$0.691"), # noqa: E201,E202
|
||||
(Money(427.814104, "AUD"), 5, "A$427.81410"), # noqa: E201,E202
|
||||
(Money(3651.285718, 'USD'), 4, '$3,651.2857'), # noqa: E201,E202
|
||||
(Money(487587.849178, 'CAD'), 5, 'CA$487,587.84918'), # noqa: E201,E202
|
||||
(Money(0.348102, 'EUR'), 1, '€0.3'), # noqa: E201,E202
|
||||
(Money(0.916530, 'GBP'), 1, '£0.9'), # noqa: E201,E202
|
||||
(Money(61.031024, 'JPY'), 3, '¥61.031'), # noqa: E201,E202
|
||||
(Money(49609.694602, 'JPY'), 1, '¥49,609.7'), # noqa: E201,E202
|
||||
(Money(155565.264777, 'AUD'), 2, 'A$155,565.26'), # noqa: E201,E202
|
||||
(Money(0.820437, 'CNY'), 4, 'CN¥0.8204'), # noqa: E201,E202
|
||||
(Money(7587.849178, 'EUR'), 0, '€7,588'), # noqa: E201,E202
|
||||
(Money(0.348102, 'GBP'), 3, '£0.348'), # noqa: E201,E202
|
||||
(Money(0.652923, 'CHF'), 0, 'CHF1'), # noqa: E201,E202
|
||||
(Money(0.820437, 'CNY'), 1, 'CN¥0.8'), # noqa: E201,E202
|
||||
(Money(98789.5295680, 'CHF'), 0, 'CHF98,790'), # noqa: E201,E202
|
||||
(Money(0.585787, 'USD'), 1, '$0.6'), # noqa: E201,E202
|
||||
(Money(0.690541, 'CAD'), 3, 'CA$0.691'), # noqa: E201,E202
|
||||
(Money(427.814104, 'AUD'), 5, 'A$427.81410'), # noqa: E201,E202
|
||||
)
|
||||
|
||||
with self.settings(LANGUAGE_CODE="en-us"):
|
||||
with self.settings(LANGUAGE_CODE='en-us'):
|
||||
for value, decimal_places, expected_result in test_data:
|
||||
result = InvenTree.format.format_money(
|
||||
value, decimal_places=decimal_places
|
||||
@ -353,22 +353,22 @@ class TestHelpers(TestCase):
|
||||
|
||||
def test_absolute_url(self):
|
||||
"""Test helper function for generating an absolute URL"""
|
||||
base = "https://demo.inventree.org:12345"
|
||||
base = 'https://demo.inventree.org:12345'
|
||||
|
||||
InvenTreeSetting.set_setting('INVENTREE_BASE_URL', base, change_user=None)
|
||||
|
||||
tests = {
|
||||
"": base,
|
||||
"api/": base + "/api/",
|
||||
"/api/": base + "/api/",
|
||||
"api": base + "/api",
|
||||
"media/label/output/": base + "/media/label/output/",
|
||||
"static/logo.png": base + "/static/logo.png",
|
||||
"https://www.google.com": "https://www.google.com",
|
||||
"https://demo.inventree.org:12345/out.html": "https://demo.inventree.org:12345/out.html",
|
||||
"https://demo.inventree.org/test.html": "https://demo.inventree.org/test.html",
|
||||
"http://www.cwi.nl:80/%7Eguido/Python.html": "http://www.cwi.nl:80/%7Eguido/Python.html",
|
||||
"test.org": base + "/test.org",
|
||||
'': base,
|
||||
'api/': base + '/api/',
|
||||
'/api/': base + '/api/',
|
||||
'api': base + '/api',
|
||||
'media/label/output/': base + '/media/label/output/',
|
||||
'static/logo.png': base + '/static/logo.png',
|
||||
'https://www.google.com': 'https://www.google.com',
|
||||
'https://demo.inventree.org:12345/out.html': 'https://demo.inventree.org:12345/out.html',
|
||||
'https://demo.inventree.org/test.html': 'https://demo.inventree.org/test.html',
|
||||
'http://www.cwi.nl:80/%7Eguido/Python.html': 'http://www.cwi.nl:80/%7Eguido/Python.html',
|
||||
'test.org': base + '/test.org',
|
||||
}
|
||||
|
||||
for url, expected in tests.items():
|
||||
@ -442,7 +442,7 @@ class TestHelpers(TestCase):
|
||||
def test_download_image(self):
|
||||
"""Test function for downloading image from remote URL"""
|
||||
# Run check with a sequence of bad URLs
|
||||
for url in ["blog", "htp://test.com/?", "google", "\\invalid-url"]:
|
||||
for url in ['blog', 'htp://test.com/?', 'google', '\\invalid-url']:
|
||||
with self.assertRaises(django_exceptions.ValidationError):
|
||||
InvenTree.helpers_model.download_image_from_url(url)
|
||||
|
||||
@ -467,7 +467,7 @@ class TestHelpers(TestCase):
|
||||
# Re-throw this error
|
||||
raise exc
|
||||
else:
|
||||
print("Unexpected error:", type(exc), exc)
|
||||
print('Unexpected error:', type(exc), exc)
|
||||
|
||||
tries += 1
|
||||
time.sleep(10 * tries)
|
||||
@ -480,7 +480,7 @@ class TestHelpers(TestCase):
|
||||
# TODO: Re-implement this test when we are happier with the external service
|
||||
# dl_helper("https://httpstat.us/200?sleep=5000", requests.exceptions.ReadTimeout, timeout=1)
|
||||
|
||||
large_img = "https://github.com/inventree/InvenTree/raw/master/InvenTree/InvenTree/static/img/paper_splash_large.jpg"
|
||||
large_img = 'https://github.com/inventree/InvenTree/raw/master/InvenTree/InvenTree/static/img/paper_splash_large.jpg'
|
||||
|
||||
InvenTreeSetting.set_setting(
|
||||
'INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE', 1, change_user=None
|
||||
@ -527,14 +527,14 @@ class TestIncrement(TestCase):
|
||||
def tests(self):
|
||||
"""Test 'intelligent' incrementing function."""
|
||||
tests = [
|
||||
("", '1'),
|
||||
(1, "2"),
|
||||
("001", "002"),
|
||||
("1001", "1002"),
|
||||
("ABC123", "ABC124"),
|
||||
("XYZ0", "XYZ1"),
|
||||
("123Q", "123Q"),
|
||||
("QQQ", "QQQ"),
|
||||
('', '1'),
|
||||
(1, '2'),
|
||||
('001', '002'),
|
||||
('1001', '1002'),
|
||||
('ABC123', 'ABC124'),
|
||||
('XYZ0', 'XYZ1'),
|
||||
('123Q', '123Q'),
|
||||
('QQQ', 'QQQ'),
|
||||
]
|
||||
|
||||
for test in tests:
|
||||
@@ -550,7 +550,7 @@ class TestMakeBarcode(TestCase):
def test_barcode_extended(self):
"""Test creation of barcode with extended data."""
bc = helpers.MakeBarcode(
"part", 3, {"id": 3, "url": "www.google.com"}, brief=False
'part', 3, {'id': 3, 'url': 'www.google.com'}, brief=False
)

self.assertIn('part', bc)
@@ -564,7 +564,7 @@ class TestMakeBarcode(TestCase):

def test_barcode_brief(self):
"""Test creation of simple barcode."""
bc = helpers.MakeBarcode("stockitem", 7)
bc = helpers.MakeBarcode('stockitem', 7)

data = json.loads(bc)
self.assertEqual(len(data), 1)
@@ -576,8 +576,8 @@ class TestDownloadFile(TestCase):

def test_download(self):
"""Tests for DownloadFile."""
helpers.DownloadFile("hello world", "out.txt")
helpers.DownloadFile(bytes(b"hello world"), "out.bin")
helpers.DownloadFile('hello world', 'out.txt')
helpers.DownloadFile(bytes(b'hello world'), 'out.bin')


class TestMPTT(TestCase):
@@ -636,62 +636,62 @@ class TestSerialNumberExtraction(TestCase):
e = helpers.extract_serial_numbers

# Test a range of numbers
sn = e("1-5", 5, 1)
sn = e('1-5', 5, 1)
self.assertEqual(len(sn), 5)
for i in range(1, 6):
self.assertIn(str(i), sn)

sn = e("11-30", 20, 1)
sn = e('11-30', 20, 1)
self.assertEqual(len(sn), 20)

sn = e("1, 2, 3, 4, 5", 5, 1)
sn = e('1, 2, 3, 4, 5', 5, 1)
self.assertEqual(len(sn), 5)

# Test partially specifying serials
sn = e("1, 2, 4+", 5, 1)
sn = e('1, 2, 4+', 5, 1)
self.assertEqual(len(sn), 5)
self.assertEqual(sn, ['1', '2', '4', '5', '6'])

# Test groups are not interpolated if enough serials are supplied
sn = e("1, 2, 3, AF5-69H, 5", 5, 1)
sn = e('1, 2, 3, AF5-69H, 5', 5, 1)
self.assertEqual(len(sn), 5)
self.assertEqual(sn, ['1', '2', '3', 'AF5-69H', '5'])

# Test groups are not interpolated with more than one hyphen in a word
sn = e("1, 2, TG-4SR-92, 4+", 5, 1)
sn = e('1, 2, TG-4SR-92, 4+', 5, 1)
self.assertEqual(len(sn), 5)
self.assertEqual(sn, ['1', '2', "TG-4SR-92", '4', '5'])
self.assertEqual(sn, ['1', '2', 'TG-4SR-92', '4', '5'])

# Test multiple placeholders
sn = e("1 2 ~ ~ ~", 5, 2)
sn = e('1 2 ~ ~ ~', 5, 2)
self.assertEqual(len(sn), 5)
self.assertEqual(sn, ['1', '2', '3', '4', '5'])

sn = e("1-5, 10-15", 11, 1)
sn = e('1-5, 10-15', 11, 1)
self.assertIn('3', sn)
self.assertIn('13', sn)

sn = e("1+", 10, 1)
sn = e('1+', 10, 1)
self.assertEqual(len(sn), 10)
self.assertEqual(sn, [str(_) for _ in range(1, 11)])

sn = e("4, 1+2", 4, 1)
sn = e('4, 1+2', 4, 1)
self.assertEqual(len(sn), 4)
self.assertEqual(sn, ['4', '1', '2', '3'])

sn = e("~", 1, 1)
sn = e('~', 1, 1)
self.assertEqual(len(sn), 1)
self.assertEqual(sn, ['2'])

sn = e("~", 1, 3)
sn = e('~', 1, 3)
self.assertEqual(len(sn), 1)
self.assertEqual(sn, ['4'])

sn = e("~+", 2, 4)
sn = e('~+', 2, 4)
self.assertEqual(len(sn), 2)
self.assertEqual(sn, ['5', '6'])

sn = e("~+3", 4, 4)
sn = e('~+3', 4, 4)
self.assertEqual(len(sn), 4)
self.assertEqual(sn, ['5', '6', '7', '8'])

@@ -701,70 +701,70 @@ class TestSerialNumberExtraction(TestCase):

# Test duplicates
with self.assertRaises(ValidationError):
e("1,2,3,3,3", 5, 1)
e('1,2,3,3,3', 5, 1)

# Test invalid length
with self.assertRaises(ValidationError):
e("1,2,3", 5, 1)
e('1,2,3', 5, 1)

# Test empty string
with self.assertRaises(ValidationError):
e(", , ,", 0, 1)
e(', , ,', 0, 1)

# Test incorrect sign in group
with self.assertRaises(ValidationError):
e("10-2", 8, 1)
e('10-2', 8, 1)

# Test invalid group
with self.assertRaises(ValidationError):
e("1-5-10", 10, 1)
e('1-5-10', 10, 1)

with self.assertRaises(ValidationError):
e("10, a, 7-70j", 4, 1)
e('10, a, 7-70j', 4, 1)

# Test groups are not interpolated with word characters
with self.assertRaises(ValidationError):
e("1, 2, 3, E-5", 5, 1)
e('1, 2, 3, E-5', 5, 1)

# Extract a range of values with a smaller range
with self.assertRaises(ValidationError) as exc:
e("11-50", 10, 1)
e('11-50', 10, 1)
self.assertIn('Range quantity exceeds 10', str(exc))

# Test groups are not interpolated with alpha characters
with self.assertRaises(ValidationError) as exc:
e("1, A-2, 3+", 5, 1)
e('1, A-2, 3+', 5, 1)
self.assertIn('Invalid group range: A-2', str(exc))

def test_combinations(self):
"""Test complex serial number combinations."""
e = helpers.extract_serial_numbers

sn = e("1 3-5 9+2", 7, 1)
sn = e('1 3-5 9+2', 7, 1)
self.assertEqual(len(sn), 7)
self.assertEqual(sn, ['1', '3', '4', '5', '9', '10', '11'])

sn = e("1,3-5,9+2", 7, 1)
sn = e('1,3-5,9+2', 7, 1)
self.assertEqual(len(sn), 7)
self.assertEqual(sn, ['1', '3', '4', '5', '9', '10', '11'])

sn = e("~+2", 3, 13)
sn = e('~+2', 3, 13)
self.assertEqual(len(sn), 3)
self.assertEqual(sn, ['14', '15', '16'])

sn = e("~+", 2, 13)
sn = e('~+', 2, 13)
self.assertEqual(len(sn), 2)
self.assertEqual(sn, ['14', '15'])

# Test multiple increment groups
sn = e("~+4, 20+4, 30+4", 15, 10)
sn = e('~+4, 20+4, 30+4', 15, 10)
self.assertEqual(len(sn), 15)

for v in [14, 24, 34]:
self.assertIn(str(v), sn)

# Test multiple range groups
sn = e("11-20, 41-50, 91-100", 30, 1)
sn = e('11-20, 41-50, 91-100', 30, 1)
self.assertEqual(len(sn), 30)

for v in range(11, 21):
@@ -859,7 +859,7 @@ class CurrencyTests(TestCase):
break

else: # pragma: no cover
print("Exchange rate update failed - retrying")
print('Exchange rate update failed - retrying')
print(f'Expected {currency_codes()}, got {[a.currency for a in rates]}')
time.sleep(1)

@@ -1030,7 +1030,7 @@ class TestSettings(InvenTreeTestCase):
# test typecasting to dict - valid JSON string should be mapped to corresponding dict
with self.in_env_context({TEST_ENV_NAME: '{"a": 1}'}):
self.assertEqual(
config.get_setting(TEST_ENV_NAME, None, typecast=dict), {"a": 1}
config.get_setting(TEST_ENV_NAME, None, typecast=dict), {'a': 1}
)

# test typecasting to dict - invalid JSON string should be mapped to empty dict
@@ -1047,8 +1047,8 @@ class TestInstanceName(InvenTreeTestCase):
self.assertEqual(version.inventreeInstanceTitle(), 'InvenTree')

# set up required setting
InvenTreeSetting.set_setting("INVENTREE_INSTANCE_TITLE", True, self.user)
InvenTreeSetting.set_setting("INVENTREE_INSTANCE", "Testing title", self.user)
InvenTreeSetting.set_setting('INVENTREE_INSTANCE_TITLE', True, self.user)
InvenTreeSetting.set_setting('INVENTREE_INSTANCE', 'Testing title', self.user)

self.assertEqual(version.inventreeInstanceTitle(), 'Testing title')

@@ -1060,7 +1060,7 @@ class TestInstanceName(InvenTreeTestCase):
"""Test instance url settings."""
# Set up required setting
InvenTreeSetting.set_setting(
"INVENTREE_BASE_URL", "http://127.1.2.3", self.user
'INVENTREE_BASE_URL', 'http://127.1.2.3', self.user
)

# The site should also be changed
@@ -1107,7 +1107,7 @@ class TestOffloadTask(InvenTreeTestCase):
offload_task('dummy_task.numbers', 1, 1, 1, force_sync=True)
)

self.assertIn("Malformed function path", str(log.output))
self.assertIn('Malformed function path', str(log.output))

# Offload dummy task with a Part instance
# This should succeed, ensuring that the Part instance is correctly pickled

@ -39,7 +39,7 @@ def getMigrationFileNames(app):
|
||||
files = local_dir.joinpath('..', app, 'migrations').iterdir()
|
||||
|
||||
# Regex pattern for migration files
|
||||
regex = re.compile(r"^[\d]+_.*\.py$")
|
||||
regex = re.compile(r'^[\d]+_.*\.py$')
|
||||
|
||||
migration_files = []
|
||||
|
||||
@ -241,14 +241,14 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
yield # your test will be run here
|
||||
|
||||
if verbose:
|
||||
msg = "\r\n%s" % json.dumps(context.captured_queries, indent=4)
|
||||
msg = '\r\n%s' % json.dumps(context.captured_queries, indent=4)
|
||||
else:
|
||||
msg = None
|
||||
|
||||
n = len(context.captured_queries)
|
||||
|
||||
if debug:
|
||||
print(f"Expected less than {value} queries, got {n} queries")
|
||||
print(f'Expected less than {value} queries, got {n} queries')
|
||||
|
||||
self.assertLess(n, value, msg=msg)
|
||||
|
||||
@ -357,7 +357,7 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
# Check that the response is of the correct type
|
||||
if not isinstance(response, StreamingHttpResponse):
|
||||
raise ValueError(
|
||||
"Response is not a StreamingHttpResponse object as expected"
|
||||
'Response is not a StreamingHttpResponse object as expected'
|
||||
)
|
||||
|
||||
# Extract filename
|
||||
|
@ -67,7 +67,7 @@ from .views import (
|
||||
auth_request,
|
||||
)
|
||||
|
||||
admin.site.site_header = "InvenTree Admin"
|
||||
admin.site.site_header = 'InvenTree Admin'
|
||||
|
||||
|
||||
apipatterns = [
|
||||
@ -96,7 +96,7 @@ apipatterns = [
|
||||
),
|
||||
# InvenTree information endpoints
|
||||
path(
|
||||
"version-text", VersionTextView.as_view(), name="api-version-text"
|
||||
'version-text', VersionTextView.as_view(), name='api-version-text'
|
||||
), # version text
|
||||
path('version/', VersionView.as_view(), name='api-version'), # version info
|
||||
path('', InfoView.as_view(), name='api-inventree-info'), # server info
|
||||
@ -153,11 +153,11 @@ apipatterns = [
|
||||
),
|
||||
# Magic login URLs
|
||||
path(
|
||||
"email/generate/",
|
||||
'email/generate/',
|
||||
csrf_exempt(GetSimpleLoginView().as_view()),
|
||||
name="sesame-generate",
|
||||
name='sesame-generate',
|
||||
),
|
||||
path("email/login/", LoginView.as_view(), name="sesame-login"),
|
||||
path('email/login/', LoginView.as_view(), name='sesame-login'),
|
||||
# Unknown endpoint
|
||||
re_path(r'^.*$', NotFoundView.as_view(), name='api-404'),
|
||||
]
|
||||
@ -403,12 +403,12 @@ classic_frontendpatterns = [
|
||||
name='socialaccount_connections',
|
||||
),
|
||||
re_path(
|
||||
r"^accounts/password/reset/key/(?P<uidb36>[0-9A-Za-z]+)-(?P<key>.+)/$",
|
||||
r'^accounts/password/reset/key/(?P<uidb36>[0-9A-Za-z]+)-(?P<key>.+)/$',
|
||||
CustomPasswordResetFromKeyView.as_view(),
|
||||
name="account_reset_password_from_key",
|
||||
name='account_reset_password_from_key',
|
||||
),
|
||||
# Override login page
|
||||
re_path("accounts/login/", CustomLoginView.as_view(), name="account_login"),
|
||||
re_path('accounts/login/', CustomLoginView.as_view(), name='account_login'),
|
||||
re_path(r'^accounts/', include('allauth_2fa.urls')), # MFA support
|
||||
re_path(r'^accounts/', include('allauth.urls')), # included urlpatterns
|
||||
]
|
||||
|
@ -119,7 +119,7 @@ def validate_overage(value):
|
||||
i = Decimal(value)
|
||||
|
||||
if i < 0:
|
||||
raise ValidationError(_("Overage value must not be negative"))
|
||||
raise ValidationError(_('Overage value must not be negative'))
|
||||
|
||||
# Looks like a number
|
||||
return True
|
||||
@ -135,15 +135,15 @@ def validate_overage(value):
|
||||
f = float(v)
|
||||
|
||||
if f < 0:
|
||||
raise ValidationError(_("Overage value must not be negative"))
|
||||
raise ValidationError(_('Overage value must not be negative'))
|
||||
elif f > 100:
|
||||
raise ValidationError(_("Overage must not exceed 100%"))
|
||||
raise ValidationError(_('Overage must not exceed 100%'))
|
||||
|
||||
return True
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
raise ValidationError(_("Invalid value for overage"))
|
||||
raise ValidationError(_('Invalid value for overage'))
|
||||
|
||||
|
||||
def validate_part_name_format(value):
|
||||
|
@ -19,7 +19,7 @@ from dulwich.repo import NotGitRepository, Repo
|
||||
from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION
|
||||
|
||||
# InvenTree software version
|
||||
INVENTREE_SW_VERSION = "0.14.0 dev"
|
||||
INVENTREE_SW_VERSION = '0.14.0 dev'
|
||||
|
||||
# Discover git
|
||||
try:
|
||||
@ -32,8 +32,8 @@ except (NotGitRepository, FileNotFoundError):
|
||||
def checkMinPythonVersion():
|
||||
"""Check that the Python version is at least 3.9"""
|
||||
|
||||
version = sys.version.split(" ")[0]
|
||||
docs = "https://docs.inventree.org/en/stable/start/intro/#python-requirements"
|
||||
version = sys.version.split(' ')[0]
|
||||
docs = 'https://docs.inventree.org/en/stable/start/intro/#python-requirements'
|
||||
|
||||
msg = f"""
|
||||
InvenTree requires Python 3.9 or above - you are running version {version}.
|
||||
@ -47,22 +47,22 @@ def checkMinPythonVersion():
|
||||
if sys.version_info.major == 3 and sys.version_info.minor < 9:
|
||||
raise RuntimeError(msg)
|
||||
|
||||
print(f"Python version {version} - {sys.executable}")
|
||||
print(f'Python version {version} - {sys.executable}')
|
||||
|
||||
|
||||
def inventreeInstanceName():
|
||||
"""Returns the InstanceName settings for the current database."""
|
||||
import common.models
|
||||
|
||||
return common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE", "")
|
||||
return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
|
||||
|
||||
|
||||
def inventreeInstanceTitle():
|
||||
"""Returns the InstanceTitle for the current database."""
|
||||
import common.models
|
||||
|
||||
if common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE_TITLE", False):
|
||||
return common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE", "")
|
||||
if common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE_TITLE', False):
|
||||
return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
|
||||
return 'InvenTree'
|
||||
|
||||
|
||||
@ -76,7 +76,7 @@ def inventreeVersionTuple(version=None):
|
||||
if version is None:
|
||||
version = INVENTREE_SW_VERSION
|
||||
|
||||
match = re.match(r"^.*(\d+)\.(\d+)\.(\d+).*$", str(version))
|
||||
match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', str(version))
|
||||
|
||||
return [int(g) for g in match.groups()]
|
||||
|
||||
@ -93,14 +93,14 @@ def inventreeDocsVersion():
|
||||
Release -> "major.minor.sub" e.g. "0.5.2"
|
||||
"""
|
||||
if isInvenTreeDevelopmentVersion():
|
||||
return "latest"
|
||||
return 'latest'
|
||||
return INVENTREE_SW_VERSION # pragma: no cover
|
||||
|
||||
|
||||
def inventreeDocUrl():
|
||||
"""Return URL for InvenTree documentation site."""
|
||||
tag = inventreeDocsVersion()
|
||||
return f"https://docs.inventree.org/en/{tag}"
|
||||
return f'https://docs.inventree.org/en/{tag}'
|
||||
|
||||
|
||||
def inventreeAppUrl():
|
||||
@ -110,12 +110,12 @@ def inventreeAppUrl():
|
||||
|
||||
def inventreeCreditsUrl():
|
||||
"""Return URL for InvenTree credits site."""
|
||||
return "https://docs.inventree.org/en/latest/credits/"
|
||||
return 'https://docs.inventree.org/en/latest/credits/'
|
||||
|
||||
|
||||
def inventreeGithubUrl():
|
||||
"""Return URL for InvenTree github site."""
|
||||
return "https://github.com/InvenTree/InvenTree/"
|
||||
return 'https://github.com/InvenTree/InvenTree/'
|
||||
|
||||
|
||||
def isInvenTreeUpToDate():
|
||||
@ -147,26 +147,26 @@ def inventreeApiVersion():
|
||||
|
||||
def parse_version_text():
|
||||
"""Parse the version text to structured data."""
|
||||
patched_data = INVENTREE_API_TEXT.split("\n\n")
|
||||
patched_data = INVENTREE_API_TEXT.split('\n\n')
|
||||
# Remove first newline on latest version
|
||||
patched_data[0] = patched_data[0].replace("\n", "", 1)
|
||||
patched_data[0] = patched_data[0].replace('\n', '', 1)
|
||||
|
||||
version_data = {}
|
||||
for version in patched_data:
|
||||
data = version.split("\n")
|
||||
data = version.split('\n')
|
||||
|
||||
version_split = data[0].split(' -> ')
|
||||
version_detail = (
|
||||
version_split[1].split(':', 1) if len(version_split) > 1 else ['']
|
||||
)
|
||||
new_data = {
|
||||
"version": version_split[0].strip(),
|
||||
"date": version_detail[0].strip(),
|
||||
"gh": version_detail[1].strip() if len(version_detail) > 1 else None,
|
||||
"text": data[1:],
|
||||
"latest": False,
|
||||
'version': version_split[0].strip(),
|
||||
'date': version_detail[0].strip(),
|
||||
'gh': version_detail[1].strip() if len(version_detail) > 1 else None,
|
||||
'text': data[1:],
|
||||
'latest': False,
|
||||
}
|
||||
version_data[new_data["version"]] = new_data
|
||||
version_data[new_data['version']] = new_data
|
||||
return version_data
|
||||
|
||||
|
||||
@ -188,7 +188,7 @@ def inventreeApiText(versions: int = 10, start_version: int = 0):
|
||||
start_version = INVENTREE_API_VERSION - versions
|
||||
|
||||
return {
|
||||
f"v{a}": version_data.get(f"v{a}", None)
|
||||
f'v{a}': version_data.get(f'v{a}', None)
|
||||
for a in range(start_version, start_version + versions)
|
||||
}
|
||||
|
||||
|
@ -135,13 +135,13 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
|
||||
app_label = model._meta.app_label
|
||||
model_name = model._meta.model_name
|
||||
|
||||
table = f"{app_label}_{model_name}"
|
||||
table = f'{app_label}_{model_name}'
|
||||
|
||||
permission = self.get_permission_class()
|
||||
|
||||
if not permission:
|
||||
raise AttributeError(
|
||||
f"permission_class not defined for {type(self).__name__}"
|
||||
f'permission_class not defined for {type(self).__name__}'
|
||||
)
|
||||
|
||||
# Check if the user has the required permission
|
||||
@ -396,8 +396,8 @@ class AjaxUpdateView(AjaxMixin, UpdateView):
|
||||
class EditUserView(AjaxUpdateView):
|
||||
"""View for editing user information."""
|
||||
|
||||
ajax_template_name = "modal_form.html"
|
||||
ajax_form_title = _("Edit User Information")
|
||||
ajax_template_name = 'modal_form.html'
|
||||
ajax_form_title = _('Edit User Information')
|
||||
form_class = EditUserForm
|
||||
|
||||
def get_object(self):
|
||||
@ -408,8 +408,8 @@ class EditUserView(AjaxUpdateView):
|
||||
class SetPasswordView(AjaxUpdateView):
|
||||
"""View for setting user password."""
|
||||
|
||||
ajax_template_name = "InvenTree/password.html"
|
||||
ajax_form_title = _("Set Password")
|
||||
ajax_template_name = 'InvenTree/password.html'
|
||||
ajax_form_title = _('Set Password')
|
||||
form_class = SetPasswordForm
|
||||
|
||||
def get_object(self):
|
||||
@ -491,14 +491,14 @@ class SearchView(TemplateView):
|
||||
class DynamicJsView(TemplateView):
|
||||
"""View for returning javacsript files, which instead of being served dynamically, are passed through the django translation engine!"""
|
||||
|
||||
template_name = ""
|
||||
template_name = ''
|
||||
content_type = 'text/javascript'
|
||||
|
||||
|
||||
class SettingsView(TemplateView):
|
||||
"""View for configuring User settings."""
|
||||
|
||||
template_name = "InvenTree/settings/settings.html"
|
||||
template_name = 'InvenTree/settings/settings.html'
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
"""Add data for template."""
|
||||
@ -506,12 +506,12 @@ class SettingsView(TemplateView):
|
||||
|
||||
ctx['settings'] = common_models.InvenTreeSetting.objects.all().order_by('key')
|
||||
|
||||
ctx["base_currency"] = common_settings.currency_code_default()
|
||||
ctx["currencies"] = common_settings.currency_codes
|
||||
ctx['base_currency'] = common_settings.currency_code_default()
|
||||
ctx['currencies'] = common_settings.currency_codes
|
||||
|
||||
ctx["rates"] = Rate.objects.filter(backend="InvenTreeExchange")
|
||||
ctx['rates'] = Rate.objects.filter(backend='InvenTreeExchange')
|
||||
|
||||
ctx["categories"] = PartCategory.objects.all().order_by(
|
||||
ctx['categories'] = PartCategory.objects.all().order_by(
|
||||
'tree_id', 'lft', 'name'
|
||||
)
|
||||
|
||||
@ -520,16 +520,16 @@ class SettingsView(TemplateView):
|
||||
backend = ExchangeBackend.objects.filter(name='InvenTreeExchange')
|
||||
if backend.exists():
|
||||
backend = backend.first()
|
||||
ctx["rates_updated"] = backend.last_update
|
||||
ctx['rates_updated'] = backend.last_update
|
||||
except Exception:
|
||||
ctx["rates_updated"] = None
|
||||
ctx['rates_updated'] = None
|
||||
|
||||
# Forms and context for allauth
|
||||
ctx['add_email_form'] = AddEmailForm
|
||||
ctx["can_add_email"] = EmailAddress.objects.can_add_email(self.request.user)
|
||||
ctx['can_add_email'] = EmailAddress.objects.can_add_email(self.request.user)
|
||||
|
||||
# Form and context for allauth social-accounts
|
||||
ctx["request"] = self.request
|
||||
ctx['request'] = self.request
|
||||
ctx['social_form'] = DisconnectForm(request=self.request)
|
||||
|
||||
# user db sessions
|
||||
@ -552,19 +552,19 @@ class AllauthOverrides(LoginRequiredMixin):
|
||||
class CustomEmailView(AllauthOverrides, EmailView):
|
||||
"""Override of allauths EmailView to always show the settings but leave the functions allow."""
|
||||
|
||||
success_url = reverse_lazy("settings")
|
||||
success_url = reverse_lazy('settings')
|
||||
|
||||
|
||||
class CustomConnectionsView(AllauthOverrides, ConnectionsView):
|
||||
"""Override of allauths ConnectionsView to always show the settings but leave the functions allow."""
|
||||
|
||||
success_url = reverse_lazy("settings")
|
||||
success_url = reverse_lazy('settings')
|
||||
|
||||
|
||||
class CustomPasswordResetFromKeyView(PasswordResetFromKeyView):
|
||||
"""Override of allauths PasswordResetFromKeyView to always show the settings but leave the functions allow."""
|
||||
|
||||
success_url = reverse_lazy("account_login")
|
||||
success_url = reverse_lazy('account_login')
|
||||
|
||||
|
||||
class UserSessionOverride:
|
||||
@ -646,18 +646,18 @@ class AppearanceSelectView(RedirectView):
|
||||
class DatabaseStatsView(AjaxView):
|
||||
"""View for displaying database statistics."""
|
||||
|
||||
ajax_template_name = "stats.html"
|
||||
ajax_form_title = _("System Information")
|
||||
ajax_template_name = 'stats.html'
|
||||
ajax_form_title = _('System Information')
|
||||
|
||||
|
||||
class AboutView(AjaxView):
|
||||
"""A view for displaying InvenTree version information"""
|
||||
|
||||
ajax_template_name = "about.html"
|
||||
ajax_form_title = _("About InvenTree")
|
||||
ajax_template_name = 'about.html'
|
||||
ajax_form_title = _('About InvenTree')
|
||||
|
||||
|
||||
class NotificationsView(TemplateView):
|
||||
"""View for showing notifications."""
|
||||
|
||||
template_name = "InvenTree/notifications/notifications.html"
|
||||
template_name = 'InvenTree/notifications/notifications.html'
|
||||
|
@@ -11,7 +11,7 @@ import os # pragma: no cover
from django.core.wsgi import get_wsgi_application # pragma: no cover

os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "InvenTree.settings"
'DJANGO_SETTINGS_MODULE', 'InvenTree.settings'
) # pragma: no cover

application = get_wsgi_application() # pragma: no cover

@ -181,7 +181,7 @@ class SettingsList(ListAPI):
|
||||
class GlobalSettingsList(SettingsList):
|
||||
"""API endpoint for accessing a list of global settings objects."""
|
||||
|
||||
queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith="_")
|
||||
queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith='_')
|
||||
serializer_class = common.serializers.GlobalSettingsSerializer
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
@ -214,7 +214,7 @@ class GlobalSettingsDetail(RetrieveUpdateAPI):
|
||||
"""
|
||||
|
||||
lookup_field = 'key'
|
||||
queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith="_")
|
||||
queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith='_')
|
||||
serializer_class = common.serializers.GlobalSettingsSerializer
|
||||
|
||||
def get_object(self):
|
||||
|
@ -29,7 +29,7 @@ class CommonConfig(AppConfig):
|
||||
if common.models.InvenTreeSetting.get_setting(
|
||||
'SERVER_RESTART_REQUIRED', backup_value=False, create=False, cache=False
|
||||
):
|
||||
logger.info("Clearing SERVER_RESTART_REQUIRED flag")
|
||||
logger.info('Clearing SERVER_RESTART_REQUIRED flag')
|
||||
|
||||
if not isImportingData():
|
||||
common.models.InvenTreeSetting.set_setting(
|
||||
|
@ -220,7 +220,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
|
||||
If a particular setting is not present, create it with the default value
|
||||
"""
|
||||
cache_key = f"BUILD_DEFAULT_VALUES:{str(cls.__name__)}"
|
||||
cache_key = f'BUILD_DEFAULT_VALUES:{str(cls.__name__)}'
|
||||
|
||||
if InvenTree.helpers.str2bool(cache.get(cache_key, False)):
|
||||
# Already built default values
|
||||
@ -234,7 +234,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
|
||||
if len(missing_keys) > 0:
|
||||
logger.info(
|
||||
"Building %s default values for %s", len(missing_keys), str(cls)
|
||||
'Building %s default values for %s', len(missing_keys), str(cls)
|
||||
)
|
||||
cls.objects.bulk_create([
|
||||
cls(key=key, value=cls.get_setting_default(key), **kwargs)
|
||||
@ -243,7 +243,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
])
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"Failed to build default values for %s (%s)", str(cls), str(type(exc))
|
||||
'Failed to build default values for %s (%s)', str(cls), str(type(exc))
|
||||
)
|
||||
pass
|
||||
|
||||
@ -299,12 +299,12 @@ class BaseInvenTreeSetting(models.Model):
|
||||
- The unique KEY string
|
||||
- Any key:value kwargs associated with the particular setting type (e.g. user-id)
|
||||
"""
|
||||
key = f"{str(cls.__name__)}:{setting_key}"
|
||||
key = f'{str(cls.__name__)}:{setting_key}'
|
||||
|
||||
for k, v in kwargs.items():
|
||||
key += f"_{k}:{v}"
|
||||
key += f'_{k}:{v}'
|
||||
|
||||
return key.replace(" ", "")
|
||||
return key.replace(' ', '')
|
||||
|
||||
@classmethod
|
||||
def get_filters(cls, **kwargs):
|
||||
@ -366,14 +366,14 @@ class BaseInvenTreeSetting(models.Model):
|
||||
)
|
||||
|
||||
# remove any hidden settings
|
||||
if exclude_hidden and setting.get("hidden", False):
|
||||
if exclude_hidden and setting.get('hidden', False):
|
||||
del settings[key.upper()]
|
||||
|
||||
# format settings values and remove protected
|
||||
for key, setting in settings.items():
|
||||
validator = cls.get_setting_validator(key, **filters)
|
||||
|
||||
if cls.is_protected(key, **filters) and setting.value != "":
|
||||
if cls.is_protected(key, **filters) and setting.value != '':
|
||||
setting.value = '***'
|
||||
elif cls.validator_is_bool(validator):
|
||||
setting.value = InvenTree.helpers.str2bool(setting.value)
|
||||
@ -438,7 +438,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
if setting.required:
|
||||
value = setting.value or cls.get_setting_default(setting.key, **kwargs)
|
||||
|
||||
if value == "":
|
||||
if value == '':
|
||||
missing_settings.append(setting.key.upper())
|
||||
|
||||
return len(missing_settings) == 0, missing_settings
|
||||
@ -766,7 +766,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
options = self.valid_options()
|
||||
|
||||
if options and self.value not in options:
|
||||
raise ValidationError(_("Chosen value is not a valid option"))
|
||||
raise ValidationError(_('Chosen value is not a valid option'))
|
||||
|
||||
def run_validator(self, validator):
|
||||
"""Run a validator against the 'value' field for this InvenTreeSetting object."""
|
||||
@ -1049,7 +1049,7 @@ class BaseInvenTreeSetting(models.Model):
|
||||
"""Check if this setting value is required."""
|
||||
setting = cls.get_setting_definition(key, **cls.get_filters(**kwargs))
|
||||
|
||||
return setting.get("required", False)
|
||||
return setting.get('required', False)
|
||||
|
||||
@property
|
||||
def required(self):
|
||||
@ -1131,8 +1131,8 @@ class InvenTreeSetting(BaseInvenTreeSetting):
|
||||
class Meta:
|
||||
"""Meta options for InvenTreeSetting."""
|
||||
|
||||
verbose_name = "InvenTree Setting"
|
||||
verbose_name_plural = "InvenTree Settings"
|
||||
verbose_name = 'InvenTree Setting'
|
||||
verbose_name_plural = 'InvenTree Settings'
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""When saving a global setting, check to see if it requires a server restart.
|
||||
@ -1454,7 +1454,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
|
||||
'name': _('Part Name Display Format'),
|
||||
'description': _('Format to display the part name'),
|
||||
'default': "{{ part.IPN if part.IPN }}{{ ' | ' if part.IPN }}{{ part.name }}{{ ' | ' if part.revision }}"
|
||||
"{{ part.revision if part.revision }}",
|
||||
'{{ part.revision if part.revision }}',
|
||||
'validator': InvenTree.validators.validate_part_name_format,
|
||||
},
|
||||
'PART_CATEGORY_DEFAULT_ICON': {
|
||||
@ -1860,7 +1860,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
|
||||
'validator': bool,
|
||||
'after_save': reload_plugin_registry,
|
||||
},
|
||||
"PROJECT_CODES_ENABLED": {
|
||||
'PROJECT_CODES_ENABLED': {
|
||||
'name': _('Enable project codes'),
|
||||
'description': _('Enable project codes for tracking projects'),
|
||||
'default': False,
|
||||
@ -1946,8 +1946,8 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
|
||||
class Meta:
|
||||
"""Meta options for InvenTreeUserSetting."""
|
||||
|
||||
verbose_name = "InvenTree User Setting"
|
||||
verbose_name_plural = "InvenTree User Settings"
|
||||
verbose_name = 'InvenTree User Setting'
|
||||
verbose_name_plural = 'InvenTree User Settings'
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['key', 'user'], name='unique key and user')
|
||||
]
|
||||
@ -2069,7 +2069,7 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
|
||||
'default': False,
|
||||
'validator': bool,
|
||||
},
|
||||
"LABEL_INLINE": {
|
||||
'LABEL_INLINE': {
|
||||
'name': _('Inline label display'),
|
||||
'description': _(
|
||||
'Display PDF labels in the browser, instead of downloading as a file'
|
||||
@ -2077,7 +2077,7 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
|
||||
'default': True,
|
||||
'validator': bool,
|
||||
},
|
||||
"LABEL_DEFAULT_PRINTER": {
|
||||
'LABEL_DEFAULT_PRINTER': {
|
||||
'name': _('Default label printer'),
|
||||
'description': _(
|
||||
'Configure which label printer should be selected by default'
|
||||
@ -2085,7 +2085,7 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
|
||||
'default': '',
|
||||
'choices': label_printer_options,
|
||||
},
|
||||
"REPORT_INLINE": {
|
||||
'REPORT_INLINE': {
|
||||
'name': _('Inline report display'),
|
||||
'description': _(
|
||||
'Display PDF reports in the browser, instead of downloading as a file'
|
||||
@ -2112,7 +2112,7 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
|
||||
'validator': bool,
|
||||
},
|
||||
'SEARCH_HIDE_INACTIVE_PARTS': {
|
||||
'name': _("Hide Inactive Parts"),
|
||||
'name': _('Hide Inactive Parts'),
|
||||
'description': _('Excluded inactive parts from search preview window'),
|
||||
'default': False,
|
||||
'validator': bool,
|
||||
@ -2360,7 +2360,7 @@ class PriceBreak(MetaMixin):
|
||||
converted = convert_money(self.price, currency_code)
|
||||
except MissingRate:
|
||||
logger.warning(
|
||||
"No currency conversion rate available for %s -> %s",
|
||||
'No currency conversion rate available for %s -> %s',
|
||||
self.price_currency,
|
||||
currency_code,
|
||||
)
|
||||
@ -2510,11 +2510,11 @@ class WebhookEndpoint(models.Model):
|
||||
"""
|
||||
|
||||
# Token
|
||||
TOKEN_NAME = "Token"
|
||||
TOKEN_NAME = 'Token'
|
||||
VERIFICATION_METHOD = VerificationMethod.NONE
|
||||
|
||||
MESSAGE_OK = "Message was received."
|
||||
MESSAGE_TOKEN_ERROR = "Incorrect token in header."
|
||||
MESSAGE_OK = 'Message was received.'
|
||||
MESSAGE_TOKEN_ERROR = 'Incorrect token in header.'
|
||||
|
||||
endpoint_id = models.CharField(
|
||||
max_length=255,
|
||||
@ -2589,7 +2589,7 @@ class WebhookEndpoint(models.Model):
|
||||
|
||||
This can be overridden to create your own token validation method.
|
||||
"""
|
||||
token = headers.get(self.TOKEN_NAME, "")
|
||||
token = headers.get(self.TOKEN_NAME, '')
|
||||
|
||||
# no token
|
||||
if self.verify == VerificationMethod.NONE:
|
||||
|
@ -311,23 +311,23 @@ class InvenTreeNotificationBodies:
|
||||
"""
|
||||
|
||||
NewOrder = NotificationBody(
|
||||
name=_("New {verbose_name}"),
|
||||
name=_('New {verbose_name}'),
|
||||
slug='{app_label}.new_{model_name}',
|
||||
message=_("A new order has been created and assigned to you"),
|
||||
message=_('A new order has been created and assigned to you'),
|
||||
template='email/new_order_assigned.html',
|
||||
)
|
||||
"""Send when a new order (build, sale or purchase) was created."""
|
||||
|
||||
OrderCanceled = NotificationBody(
|
||||
name=_("{verbose_name} canceled"),
|
||||
name=_('{verbose_name} canceled'),
|
||||
slug='{app_label}.canceled_{model_name}',
|
||||
message=_("A order that is assigned to you was canceled"),
|
||||
message=_('A order that is assigned to you was canceled'),
|
||||
template='email/canceled_order_assigned.html',
|
||||
)
|
||||
"""Send when a order (sale, return or purchase) was canceled."""
|
||||
|
||||
ItemsReceived = NotificationBody(
|
||||
name=_("Items Received"),
|
||||
name=_('Items Received'),
|
||||
slug='purchase_order.items_received',
|
||||
message=_('Items have been received against a purchase order'),
|
||||
template='email/purchase_order_received.html',
|
||||
@ -414,7 +414,7 @@ def trigger_notification(obj, category=None, obj_ref='pk', **kwargs):
|
||||
# Unhandled type
|
||||
else:
|
||||
logger.error(
|
||||
"Unknown target passed to trigger_notification method: %s", target
|
||||
'Unknown target passed to trigger_notification method: %s', target
|
||||
)
|
||||
|
||||
if target_users:
|
||||
@ -515,4 +515,4 @@ def deliver_notification(
|
||||
str(obj),
|
||||
)
|
||||
if not success:
|
||||
logger.info("There were some problems")
|
||||
logger.info('There were some problems')
|
||||
|
@ -51,7 +51,7 @@ def update_news_feed():
|
||||
try:
|
||||
d = feedparser.parse(settings.INVENTREE_NEWS_URL)
|
||||
except Exception as entry: # pragma: no cover
|
||||
logger.warning("update_news_feed: Error parsing the newsfeed", entry)
|
||||
logger.warning('update_news_feed: Error parsing the newsfeed', entry)
|
||||
return
|
||||
|
||||
# Get a reference list
|
||||
@ -97,7 +97,7 @@ def delete_old_notes_images():
|
||||
# Remove any notes which point to non-existent image files
|
||||
for note in NotesImage.objects.all():
|
||||
if not os.path.exists(note.image.path):
|
||||
logger.info("Deleting note %s - image file does not exist", note.image.path)
|
||||
logger.info('Deleting note %s - image file does not exist', note.image.path)
|
||||
note.delete()
|
||||
|
||||
note_classes = getModelsWithMixin(InvenTreeNotesMixin)
|
||||
@ -116,7 +116,7 @@ def delete_old_notes_images():
|
||||
break
|
||||
|
||||
if not found:
|
||||
logger.info("Deleting note %s - image file not linked to a note", img)
|
||||
logger.info('Deleting note %s - image file not linked to a note', img)
|
||||
note.delete()
|
||||
|
||||
# Finally, remove any images in the notes dir which are not linked to a note
|
||||
@ -139,5 +139,5 @@ def delete_old_notes_images():
|
||||
break
|
||||
|
||||
if not found:
|
||||
logger.info("Deleting note %s - image file not linked to a note", image)
|
||||
logger.info('Deleting note %s - image file not linked to a note', image)
|
||||
os.remove(os.path.join(notes_dir, image))
|
||||
|
@ -136,19 +136,19 @@ class SettingsTest(InvenTreeTestCase):
|
||||
def test_all_settings(self):
|
||||
"""Make sure that the all_settings function returns correctly"""
|
||||
result = InvenTreeSetting.all_settings()
|
||||
self.assertIn("INVENTREE_INSTANCE", result)
|
||||
self.assertIn('INVENTREE_INSTANCE', result)
|
||||
self.assertIsInstance(result['INVENTREE_INSTANCE'], InvenTreeSetting)
|
||||
|
||||
@mock.patch("common.models.InvenTreeSetting.get_setting_definition")
|
||||
@mock.patch('common.models.InvenTreeSetting.get_setting_definition')
|
||||
def test_check_all_settings(self, get_setting_definition):
|
||||
"""Make sure that the check_all_settings function returns correctly"""
|
||||
# define partial schema
|
||||
settings_definition = {
|
||||
"AB": { # key that's has not already been accessed
|
||||
"required": True
|
||||
'AB': { # key that's has not already been accessed
|
||||
'required': True
|
||||
},
|
||||
"CD": {"required": True, "protected": True},
|
||||
"EF": {},
|
||||
'CD': {'required': True, 'protected': True},
|
||||
'EF': {},
|
||||
}
|
||||
|
||||
def mocked(key, **kwargs):
|
||||
@ -160,28 +160,28 @@ class SettingsTest(InvenTreeTestCase):
|
||||
InvenTreeSetting.check_all_settings(
|
||||
settings_definition=settings_definition
|
||||
),
|
||||
(False, ["AB", "CD"]),
|
||||
(False, ['AB', 'CD']),
|
||||
)
|
||||
InvenTreeSetting.set_setting('AB', "hello", self.user)
|
||||
InvenTreeSetting.set_setting('CD', "world", self.user)
|
||||
InvenTreeSetting.set_setting('AB', 'hello', self.user)
|
||||
InvenTreeSetting.set_setting('CD', 'world', self.user)
|
||||
self.assertEqual(InvenTreeSetting.check_all_settings(), (True, []))
|
||||
|
||||
@mock.patch("common.models.InvenTreeSetting.get_setting_definition")
|
||||
@mock.patch('common.models.InvenTreeSetting.get_setting_definition')
|
||||
def test_settings_validator(self, get_setting_definition):
|
||||
"""Make sure that the validator function gets called on set setting."""
|
||||
|
||||
def validator(x):
|
||||
if x == "hello":
|
||||
if x == 'hello':
|
||||
return x
|
||||
|
||||
raise ValidationError(f"{x} is not valid")
|
||||
raise ValidationError(f'{x} is not valid')
|
||||
|
||||
mock_validator = mock.Mock(side_effect=validator)
|
||||
|
||||
# define partial schema
|
||||
settings_definition = {
|
||||
"AB": { # key that's has not already been accessed
|
||||
"validator": mock_validator
|
||||
'AB': { # key that's has not already been accessed
|
||||
'validator': mock_validator
|
||||
}
|
||||
}
|
||||
|
||||
@ -190,12 +190,12 @@ class SettingsTest(InvenTreeTestCase):
|
||||
|
||||
get_setting_definition.side_effect = mocked
|
||||
|
||||
InvenTreeSetting.set_setting("AB", "hello", self.user)
|
||||
mock_validator.assert_called_with("hello")
|
||||
InvenTreeSetting.set_setting('AB', 'hello', self.user)
|
||||
mock_validator.assert_called_with('hello')
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
InvenTreeSetting.set_setting("AB", "world", self.user)
|
||||
mock_validator.assert_called_with("world")
|
||||
InvenTreeSetting.set_setting('AB', 'world', self.user)
|
||||
mock_validator.assert_called_with('world')
|
||||
|
||||
def run_settings_check(self, key, setting):
|
||||
"""Test that all settings are valid.
|
||||
@ -322,7 +322,7 @@ class SettingsTest(InvenTreeTestCase):
|
||||
# Generate a number of new users
|
||||
for idx in range(5):
|
||||
get_user_model().objects.create(
|
||||
username=f"User_{idx}", password="hunter42", email="email@dot.com"
|
||||
username=f'User_{idx}', password='hunter42', email='email@dot.com'
|
||||
)
|
||||
|
||||
key = 'SEARCH_PREVIEW_RESULTS'
|
||||
@ -333,7 +333,7 @@ class SettingsTest(InvenTreeTestCase):
|
||||
cache_key = setting.cache_key
|
||||
self.assertEqual(
|
||||
cache_key,
|
||||
f"InvenTreeUserSetting:SEARCH_PREVIEW_RESULTS_user:{user.username}",
|
||||
f'InvenTreeUserSetting:SEARCH_PREVIEW_RESULTS_user:{user.username}',
|
||||
)
|
||||
InvenTreeUserSetting.set_setting(key, user.pk, None, user=user)
|
||||
self.assertIsNotNone(cache.get(cache_key))
|
||||
@ -399,7 +399,7 @@ class GlobalSettingsApiTest(InvenTreeAPITestCase):
|
||||
def test_api_detail(self):
|
||||
"""Test that we can access the detail view for a setting based on the <key>."""
|
||||
# These keys are invalid, and should return 404
|
||||
for key in ["apple", "carrot", "dog"]:
|
||||
for key in ['apple', 'carrot', 'dog']:
|
||||
response = self.get(
|
||||
reverse('api-global-setting-detail', kwargs={'key': key}),
|
||||
expected_code=404,
|
||||
@ -770,7 +770,7 @@ class WebhookMessageTests(TestCase):
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.url,
|
||||
data={"this": "is a message"},
|
||||
data={'this': 'is a message'},
|
||||
content_type=CONTENT_TYPE_JSON,
|
||||
**{'HTTP_TOKEN': str(self.endpoint_def.token)},
|
||||
)
|
||||
@ -778,7 +778,7 @@ class WebhookMessageTests(TestCase):
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert str(response.content, 'utf-8') == WebhookView.model_class.MESSAGE_OK
|
||||
message = WebhookMessage.objects.get()
|
||||
assert message.body == {"this": "is a message"}
|
||||
assert message.body == {'this': 'is a message'}
|
||||
|
||||
|
||||
class NotificationTest(InvenTreeAPITestCase):
|
||||
@ -1033,7 +1033,7 @@ class CurrencyAPITests(InvenTreeAPITestCase):
|
||||
# Delay and try again
|
||||
time.sleep(10)
|
||||
|
||||
raise TimeoutError("Could not refresh currency exchange data after 5 attempts")
|
||||
raise TimeoutError('Could not refresh currency exchange data after 5 attempts')
|
||||
|
||||
|
||||
class NotesImageTest(InvenTreeAPITestCase):
|
||||
@ -1048,28 +1048,28 @@ class NotesImageTest(InvenTreeAPITestCase):
|
||||
reverse('api-notes-image-list'),
|
||||
data={
|
||||
'image': SimpleUploadedFile(
|
||||
'test.txt', b"this is not an image file", content_type='text/plain'
|
||||
'test.txt', b'this is not an image file', content_type='text/plain'
|
||||
)
|
||||
},
|
||||
format='multipart',
|
||||
expected_code=400,
|
||||
)
|
||||
|
||||
self.assertIn("Upload a valid image", str(response.data['image']))
|
||||
self.assertIn('Upload a valid image', str(response.data['image']))
|
||||
|
||||
# Test upload of an invalid image file
|
||||
response = self.post(
|
||||
reverse('api-notes-image-list'),
|
||||
data={
|
||||
'image': SimpleUploadedFile(
|
||||
'test.png', b"this is not an image file", content_type='image/png'
|
||||
'test.png', b'this is not an image file', content_type='image/png'
|
||||
)
|
||||
},
|
||||
format='multipart',
|
||||
expected_code=400,
|
||||
)
|
||||
|
||||
self.assertIn("Upload a valid image", str(response.data['image']))
|
||||
self.assertIn('Upload a valid image', str(response.data['image']))
|
||||
|
||||
# Check that no extra database entries have been created
|
||||
self.assertEqual(NotesImage.objects.count(), n)
|
||||
|
@ -81,7 +81,7 @@ class FileManagementFormView(MultiStepFormView):
|
||||
('fields', forms.MatchFieldForm),
|
||||
('items', forms.MatchItemForm),
|
||||
]
|
||||
form_steps_description = [_("Upload File"), _("Match Fields"), _("Match Items")]
|
||||
form_steps_description = [_('Upload File'), _('Match Fields'), _('Match Items')]
|
||||
media_folder = 'file_upload/'
|
||||
extra_context_data = {}
|
||||
|
||||
|
@ -96,7 +96,7 @@ class Company(InvenTreeNotesMixin, MetadataMixin, models.Model):
|
||||
constraints = [
|
||||
UniqueConstraint(fields=['name', 'email'], name='unique_name_email_pair')
|
||||
]
|
||||
verbose_name_plural = "Companies"
|
||||
verbose_name_plural = 'Companies'
|
||||
|
||||
@staticmethod
|
||||
def get_api_url():
|
||||
@ -215,7 +215,7 @@ class Company(InvenTreeNotesMixin, MetadataMixin, models.Model):
|
||||
|
||||
def __str__(self):
|
||||
"""Get string representation of a Company."""
|
||||
return f"{self.name} - {self.description}"
|
||||
return f'{self.name} - {self.description}'
|
||||
|
||||
def get_absolute_url(self):
|
||||
"""Get the web URL for the detail view for this Company."""
|
||||
@ -318,7 +318,7 @@ class Address(models.Model):
|
||||
class Meta:
|
||||
"""Metaclass defines extra model options"""
|
||||
|
||||
verbose_name_plural = "Addresses"
|
||||
verbose_name_plural = 'Addresses'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Custom init function"""
|
||||
@ -340,7 +340,7 @@ class Address(models.Model):
|
||||
if len(line) > 0:
|
||||
populated_lines.append(line)
|
||||
|
||||
return ", ".join(populated_lines)
|
||||
return ', '.join(populated_lines)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Run checks when saving an address:
|
||||
@ -564,7 +564,7 @@ class ManufacturerPartAttachment(InvenTreeAttachment):
|
||||
|
||||
def getSubdir(self):
|
||||
"""Return the subdirectory where attachment files for the ManufacturerPart model are located"""
|
||||
return os.path.join("manufacturer_part_files", str(self.manufacturer_part.id))
|
||||
return os.path.join('manufacturer_part_files', str(self.manufacturer_part.id))
|
||||
|
||||
manufacturer_part = models.ForeignKey(
|
||||
ManufacturerPart,
|
||||
@ -711,14 +711,14 @@ class SupplierPart(MetadataMixin, InvenTreeBarcodeMixin, common.models.MetaMixin
|
||||
):
|
||||
raise ValidationError({
|
||||
'pack_quantity': _(
|
||||
"Pack units must be compatible with the base part units"
|
||||
'Pack units must be compatible with the base part units'
|
||||
)
|
||||
})
|
||||
|
||||
# Native value must be greater than zero
|
||||
if float(native_value.magnitude) <= 0:
|
||||
raise ValidationError({
|
||||
'pack_quantity': _("Pack units must be greater than zero")
|
||||
'pack_quantity': _('Pack units must be greater than zero')
|
||||
})
|
||||
|
||||
# Update native pack units value
|
||||
@ -732,7 +732,7 @@ class SupplierPart(MetadataMixin, InvenTreeBarcodeMixin, common.models.MetaMixin
|
||||
if self.manufacturer_part.part != self.part:
|
||||
raise ValidationError({
|
||||
'manufacturer_part': _(
|
||||
"Linked manufacturer part must reference the same base part"
|
||||
'Linked manufacturer part must reference the same base part'
|
||||
)
|
||||
})
|
||||
|
||||
@ -787,7 +787,7 @@ class SupplierPart(MetadataMixin, InvenTreeBarcodeMixin, common.models.MetaMixin
|
||||
|
||||
SKU = models.CharField(
|
||||
max_length=100,
|
||||
verbose_name=__("SKU = Stock Keeping Unit (supplier part number)", 'SKU'),
|
||||
verbose_name=__('SKU = Stock Keeping Unit (supplier part number)', 'SKU'),
|
||||
help_text=_('Supplier stock keeping unit'),
|
||||
)
|
||||
|
||||
@ -1007,7 +1007,7 @@ class SupplierPriceBreak(common.models.PriceBreak):
|
||||
class Meta:
|
||||
"""Metaclass defines extra model options"""
|
||||
|
||||
unique_together = ("part", "quantity")
|
||||
unique_together = ('part', 'quantity')
|
||||
|
||||
# This model was moved from the 'Part' app
|
||||
db_table = 'part_supplierpricebreak'
|
||||
|
@ -168,7 +168,7 @@ class CompanySerializer(RemoteImageMixin, InvenTreeModelSerializer):
|
||||
remote_img.save(buffer, format=fmt)
|
||||
|
||||
# Construct a simplified name for the image
|
||||
filename = f"company_{company.pk}_image.{fmt.lower()}"
|
||||
filename = f'company_{company.pk}_image.{fmt.lower()}'
|
||||
|
||||
company.image.save(filename, ContentFile(buffer.getvalue()))
|
||||
|
||||
|
@ -107,8 +107,8 @@ class CompanyTest(InvenTreeAPITestCase):
|
||||
response = self.post(
|
||||
url,
|
||||
{
|
||||
'name': "Another Company",
|
||||
'description': "Also created via the API!",
|
||||
'name': 'Another Company',
|
||||
'description': 'Also created via the API!',
|
||||
'currency': 'AUD',
|
||||
'is_supplier': False,
|
||||
'is_manufacturer': True,
|
||||
@ -125,7 +125,7 @@ class CompanyTest(InvenTreeAPITestCase):
|
||||
# Attempt to create with invalid currency
|
||||
response = self.post(
|
||||
url,
|
||||
{'name': "A name", 'description': 'A description', 'currency': 'POQD'},
|
||||
{'name': 'A name', 'description': 'A description', 'currency': 'POQD'},
|
||||
expected_code=400,
|
||||
)
|
||||
|
||||
@ -144,7 +144,7 @@ class ContactTest(InvenTreeAPITestCase):
|
||||
|
||||
# Create some companies
|
||||
companies = [
|
||||
Company(name=f"Company {idx}", description="Some company")
|
||||
Company(name=f'Company {idx}', description='Some company')
|
||||
for idx in range(3)
|
||||
]
|
||||
|
||||
@ -155,7 +155,7 @@ class ContactTest(InvenTreeAPITestCase):
|
||||
# Create some contacts
|
||||
for cmp in Company.objects.all():
|
||||
contacts += [
|
||||
Contact(company=cmp, name=f"My name {idx}") for idx in range(3)
|
||||
Contact(company=cmp, name=f'My name {idx}') for idx in range(3)
|
||||
]
|
||||
|
||||
Contact.objects.bulk_create(contacts)
|
||||
@ -251,7 +251,7 @@ class AddressTest(InvenTreeAPITestCase):
|
||||
cls.num_addr = 3
|
||||
# Create some companies
|
||||
companies = [
|
||||
Company(name=f"Company {idx}", description="Some company")
|
||||
Company(name=f'Company {idx}', description='Some company')
|
||||
for idx in range(cls.num_companies)
|
||||
]
|
||||
|
||||
@ -262,7 +262,7 @@ class AddressTest(InvenTreeAPITestCase):
|
||||
# Create some contacts
|
||||
for cmp in Company.objects.all():
|
||||
addresses += [
|
||||
Address(company=cmp, title=f"Address no. {idx}")
|
||||
Address(company=cmp, title=f'Address no. {idx}')
|
||||
for idx in range(cls.num_addr)
|
||||
]
|
||||
|
||||
|
@ -228,8 +228,8 @@ class TestCurrencyMigration(MigratorTestCase):
|
||||
Part = self.old_state.apps.get_model('part', 'part')
|
||||
|
||||
part = Part.objects.create(
|
||||
name="PART",
|
||||
description="A purchaseable part",
|
||||
name='PART',
|
||||
description='A purchaseable part',
|
||||
purchaseable=True,
|
||||
level=0,
|
||||
tree_id=0,
|
||||
@ -309,7 +309,7 @@ class TestAddressMigration(MigratorTestCase):
|
||||
a2 = Address.objects.filter(company=c2.pk).first()
|
||||
|
||||
self.assertEqual(a1.line1, self.short_l1)
|
||||
self.assertEqual(a1.line2, "")
|
||||
self.assertEqual(a1.line2, '')
|
||||
self.assertEqual(a2.line1, self.long_l1)
|
||||
self.assertEqual(a2.line2, self.l2)
|
||||
self.assertEqual(c1.address, '')
|
||||
@ -329,8 +329,8 @@ class TestSupplierPartQuantity(MigratorTestCase):
|
||||
SupplierPart = self.old_state.apps.get_model('company', 'supplierpart')
|
||||
|
||||
self.part = Part.objects.create(
|
||||
name="PART",
|
||||
description="A purchaseable part",
|
||||
name='PART',
|
||||
description='A purchaseable part',
|
||||
purchaseable=True,
|
||||
level=0,
|
||||
tree_id=0,
|
||||
|
@ -103,9 +103,9 @@ class CompanySimpleTest(TestCase):
|
||||
"""Unit tests for supplier part pricing"""
|
||||
m2x4 = Part.objects.get(name='M2x4 LPHS')
|
||||
|
||||
self.assertEqual(m2x4.get_price_info(5.5), "38.5 - 41.25")
|
||||
self.assertEqual(m2x4.get_price_info(10), "70 - 75")
|
||||
self.assertEqual(m2x4.get_price_info(100), "125 - 350")
|
||||
self.assertEqual(m2x4.get_price_info(5.5), '38.5 - 41.25')
|
||||
self.assertEqual(m2x4.get_price_info(10), '70 - 75')
|
||||
self.assertEqual(m2x4.get_price_info(100), '125 - 350')
|
||||
|
||||
pmin, pmax = m2x4.get_price_range(5)
|
||||
self.assertEqual(pmin, 35)
|
||||
@ -222,13 +222,13 @@ class AddressTest(TestCase):
|
||||
|
||||
def test_model_str(self):
|
||||
"""Test value of __str__"""
|
||||
t = "Test address"
|
||||
l1 = "Busy street 56"
|
||||
l2 = "Red building"
|
||||
pcd = "12345"
|
||||
pct = "City"
|
||||
pv = "Province"
|
||||
cn = "COUNTRY"
|
||||
t = 'Test address'
|
||||
l1 = 'Busy street 56'
|
||||
l2 = 'Red building'
|
||||
pcd = '12345'
|
||||
pct = 'City'
|
||||
pv = 'Province'
|
||||
cn = 'COUNTRY'
|
||||
addr = Address.objects.create(
|
||||
company=self.c,
|
||||
title=t,
|
||||
|
@ -39,10 +39,10 @@ class StatusView(APIView):
|
||||
status_class = self.get_status_model()
|
||||
|
||||
if not inspect.isclass(status_class):
|
||||
raise NotImplementedError("`status_class` not a class")
|
||||
raise NotImplementedError('`status_class` not a class')
|
||||
|
||||
if not issubclass(status_class, StatusCode):
|
||||
raise NotImplementedError("`status_class` not a valid StatusCode class")
|
||||
raise NotImplementedError('`status_class` not a valid StatusCode class')
|
||||
|
||||
data = {'class': status_class.__name__, 'values': status_class.dict()}
|
||||
|
||||
|
@ -14,9 +14,9 @@ from .states import StatusCode
|
||||
class GeneralStatus(StatusCode):
|
||||
"""Defines a set of status codes for tests."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary'
|
||||
PLACED = 20, _("Placed"), 'primary'
|
||||
COMPLETE = 30, _("Complete"), 'success'
|
||||
PENDING = 10, _('Pending'), 'secondary'
|
||||
PLACED = 20, _('Placed'), 'primary'
|
||||
COMPLETE = 30, _('Complete'), 'success'
|
||||
ABC = None # This should be ignored
|
||||
_DEF = None # This should be ignored
|
||||
jkl = None # This should be ignored
|
||||
@ -183,11 +183,11 @@ class GeneralStateTest(InvenTreeTestCase):
|
||||
# Invalid call - not a class
|
||||
with self.assertRaises(NotImplementedError) as e:
|
||||
resp = view(rqst, **{StatusView.MODEL_REF: 'invalid'})
|
||||
self.assertEqual(str(e.exception), "`status_class` not a class")
|
||||
self.assertEqual(str(e.exception), '`status_class` not a class')
|
||||
|
||||
# Invalid call - not the right class
|
||||
with self.assertRaises(NotImplementedError) as e:
|
||||
resp = view(rqst, **{StatusView.MODEL_REF: object})
|
||||
self.assertEqual(
|
||||
str(e.exception), "`status_class` not a valid StatusCode class"
|
||||
str(e.exception), '`status_class` not a valid StatusCode class'
|
||||
)
|
||||
|
@@ -2,7 +2,7 @@

import multiprocessing

bind = "0.0.0.0:8000"
bind = '0.0.0.0:8000'

workers = multiprocessing.cpu_count() * 2 + 1

@ -130,12 +130,12 @@ class LabelListView(LabelFilterMixin, ListCreateAPI):
|
||||
class LabelPrintMixin(LabelFilterMixin):
|
||||
"""Mixin for printing labels."""
|
||||
|
||||
rolemap = {"GET": "view", "POST": "view"}
|
||||
rolemap = {'GET': 'view', 'POST': 'view'}
|
||||
|
||||
def check_permissions(self, request):
|
||||
"""Override request method to GET so that also non superusers can print using a post request."""
|
||||
if request.method == "POST":
|
||||
request = clone_request(request, "GET")
|
||||
if request.method == 'POST':
|
||||
request = clone_request(request, 'GET')
|
||||
return super().check_permissions(request)
|
||||
|
||||
@method_decorator(never_cache)
|
||||
@ -199,7 +199,7 @@ class LabelPrintMixin(LabelFilterMixin):
|
||||
if not plugin.is_active():
|
||||
raise ValidationError(f"Plugin '{plugin_key}' is not enabled")
|
||||
|
||||
if not plugin.mixin_enabled("labels"):
|
||||
if not plugin.mixin_enabled('labels'):
|
||||
raise ValidationError(
|
||||
f"Plugin '{plugin_key}' is not a label printing plugin"
|
||||
)
|
||||
|
@ -19,7 +19,7 @@ from InvenTree.ready import (
|
||||
isPluginRegistryLoaded,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def hashFile(filename):
|
||||
|
@ -25,12 +25,12 @@ from plugin.registry import registry
|
||||
try:
|
||||
from django_weasyprint import WeasyTemplateResponseMixin
|
||||
except OSError as err: # pragma: no cover
|
||||
print(f"OSError: {err}")
|
||||
print("You may require some further system packages to be installed.")
|
||||
print(f'OSError: {err}')
|
||||
print('You may require some further system packages to be installed.')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def rename_label(instance, filename):
|
||||
@ -97,7 +97,7 @@ class LabelTemplate(MetadataMixin, models.Model):
|
||||
abstract = True
|
||||
|
||||
# Each class of label files will be stored in a separate subdirectory
|
||||
SUBDIR = "label"
|
||||
SUBDIR = 'label'
|
||||
|
||||
# Object we will be printing against (will be filled out later)
|
||||
object_to_print = None
|
||||
@ -109,7 +109,7 @@ class LabelTemplate(MetadataMixin, models.Model):
|
||||
|
||||
def __str__(self):
|
||||
"""Format a string representation of a label instance"""
|
||||
return f"{self.name} - {self.description}"
|
||||
return f'{self.name} - {self.description}'
|
||||
|
||||
name = models.CharField(
|
||||
blank=False, max_length=100, verbose_name=_('Name'), help_text=_('Label name')
|
||||
@ -154,7 +154,7 @@ class LabelTemplate(MetadataMixin, models.Model):
|
||||
)
|
||||
|
||||
filename_pattern = models.CharField(
|
||||
default="label.pdf",
|
||||
default='label.pdf',
|
||||
verbose_name=_('Filename Pattern'),
|
||||
help_text=_('Pattern for generating label filenames'),
|
||||
max_length=100,
|
||||
@ -265,7 +265,7 @@ class LabelTemplate(MetadataMixin, models.Model):
|
||||
wp = WeasyprintLabelMixin(
|
||||
request,
|
||||
self.template_name,
|
||||
base_url=request.build_absolute_uri("/"),
|
||||
base_url=request.build_absolute_uri('/'),
|
||||
presentational_hints=True,
|
||||
filename=self.generate_filename(request),
|
||||
**kwargs,
|
||||
@ -304,7 +304,7 @@ class StockItemLabel(LabelTemplate):
|
||||
"""Return the API URL associated with the StockItemLabel model"""
|
||||
return reverse('api-stockitem-label-list') # pragma: no cover
|
||||
|
||||
SUBDIR = "stockitem"
|
||||
SUBDIR = 'stockitem'
|
||||
|
||||
filters = models.CharField(
|
||||
blank=True,
|
||||
@ -343,7 +343,7 @@ class StockLocationLabel(LabelTemplate):
|
||||
"""Return the API URL associated with the StockLocationLabel model"""
|
||||
return reverse('api-stocklocation-label-list') # pragma: no cover
|
||||
|
||||
SUBDIR = "stocklocation"
|
||||
SUBDIR = 'stocklocation'
|
||||
|
||||
filters = models.CharField(
|
||||
blank=True,
|
||||
|
@ -55,13 +55,13 @@ class LabelTest(InvenTreeAPITestCase):
|
||||
|
||||
def test_filters(self):
|
||||
"""Test the label filters."""
|
||||
filter_string = "part__pk=10"
|
||||
filter_string = 'part__pk=10'
|
||||
|
||||
filters = validateFilterString(filter_string, model=StockItem)
|
||||
|
||||
self.assertEqual(type(filters), dict)
|
||||
|
||||
bad_filter_string = "part_pk=10"
|
||||
bad_filter_string = 'part_pk=10'
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validateFilterString(bad_filter_string, model=StockItem)
|
||||
@ -107,7 +107,7 @@ class LabelTest(InvenTreeAPITestCase):
|
||||
buffer = io.StringIO()
|
||||
buffer.write(label_data)
|
||||
|
||||
template = ContentFile(buffer.getvalue(), "label.html")
|
||||
template = ContentFile(buffer.getvalue(), 'label.html')
|
||||
|
||||
# Construct a label template
|
||||
label = PartLabel.objects.create(
|
||||
@ -140,7 +140,7 @@ class LabelTest(InvenTreeAPITestCase):
|
||||
content = f.read()
|
||||
|
||||
# Test that each element has been rendered correctly
|
||||
self.assertIn(f"part: {part_pk} - {part_name}", content)
|
||||
self.assertIn(f'part: {part_pk} - {part_name}', content)
|
||||
self.assertIn(f'data: {{"part": {part_pk}}}', content)
|
||||
self.assertIn(f'http://testserver/part/{part_pk}/', content)
|
||||
|
||||
|
@ -7,17 +7,17 @@ import sys
|
||||
|
||||
def main():
|
||||
"""Run administrative tasks."""
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "InvenTree.settings")
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'InvenTree.settings')
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError as exc: # pragma: no cover
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
'available on your PYTHONPATH environment variable? Did you '
|
||||
'forget to activate a virtual environment?'
|
||||
) from exc
|
||||
execute_from_command_line(sys.argv)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
@ -86,7 +86,7 @@ class OrderFilter(rest_filters.FilterSet):
|
||||
"""Base class for custom API filters for the OrderList endpoint."""
|
||||
|
||||
# Filter against order status
|
||||
status = rest_filters.NumberFilter(label="Order Status", method='filter_status')
|
||||
status = rest_filters.NumberFilter(label='Order Status', method='filter_status')
|
||||
|
||||
def filter_status(self, queryset, name, value):
|
||||
"""Filter by integer status code"""
|
||||
@ -94,7 +94,7 @@ class OrderFilter(rest_filters.FilterSet):
|
||||
|
||||
# Exact match for reference
|
||||
reference = rest_filters.CharFilter(
|
||||
label='Filter by exact reference', field_name='reference', lookup_expr="iexact"
|
||||
label='Filter by exact reference', field_name='reference', lookup_expr='iexact'
|
||||
)
|
||||
|
||||
assigned_to_me = rest_filters.BooleanFilter(
|
||||
@ -155,7 +155,7 @@ class LineItemFilter(rest_filters.FilterSet):
|
||||
)
|
||||
|
||||
has_pricing = rest_filters.BooleanFilter(
|
||||
label="Has Pricing", method='filter_has_pricing'
|
||||
label='Has Pricing', method='filter_has_pricing'
|
||||
)
|
||||
|
||||
def filter_has_pricing(self, queryset, name, value):
|
||||
@ -271,7 +271,7 @@ class PurchaseOrderList(PurchaseOrderMixin, APIDownloadMixin, ListCreateAPI):
|
||||
|
||||
filedata = dataset.export(export_format)
|
||||
|
||||
filename = f"InvenTree_PurchaseOrders.{export_format}"
|
||||
filename = f'InvenTree_PurchaseOrders.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -518,7 +518,7 @@ class PurchaseOrderLineItemList(
|
||||
|
||||
filedata = dataset.export(export_format)
|
||||
|
||||
filename = f"InvenTree_PurchaseOrderItems.{export_format}"
|
||||
filename = f'InvenTree_PurchaseOrderItems.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -567,7 +567,7 @@ class PurchaseOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
|
||||
"""Download this queryset as a file"""
|
||||
dataset = PurchaseOrderExtraLineResource().export(queryset=queryset)
|
||||
filedata = dataset.export(export_format)
|
||||
filename = f"InvenTree_ExtraPurchaseOrderLines.{export_format}"
|
||||
filename = f'InvenTree_ExtraPurchaseOrderLines.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -665,7 +665,7 @@ class SalesOrderList(SalesOrderMixin, APIDownloadMixin, ListCreateAPI):
|
||||
|
||||
filedata = dataset.export(export_format)
|
||||
|
||||
filename = f"InvenTree_SalesOrders.{export_format}"
|
||||
filename = f'InvenTree_SalesOrders.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -809,7 +809,7 @@ class SalesOrderLineItemList(SalesOrderLineItemMixin, APIDownloadMixin, ListCrea
|
||||
dataset = SalesOrderLineItemResource().export(queryset=queryset)
|
||||
filedata = dataset.export(export_format)
|
||||
|
||||
filename = f"InvenTree_SalesOrderItems.{export_format}"
|
||||
filename = f'InvenTree_SalesOrderItems.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -836,7 +836,7 @@ class SalesOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
|
||||
"""Download this queryset as a file"""
|
||||
dataset = SalesOrderExtraLineResource().export(queryset=queryset)
|
||||
filedata = dataset.export(export_format)
|
||||
filename = f"InvenTree_ExtraSalesOrderLines.{export_format}"
|
||||
filename = f'InvenTree_ExtraSalesOrderLines.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -1127,7 +1127,7 @@ class ReturnOrderList(ReturnOrderMixin, APIDownloadMixin, ListCreateAPI):
|
||||
"""Download this queryset as a file"""
|
||||
dataset = ReturnOrderResource().export(queryset=queryset)
|
||||
filedata = dataset.export(export_format)
|
||||
filename = f"InvenTree_ReturnOrders.{export_format}"
|
||||
filename = f'InvenTree_ReturnOrders.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -1274,7 +1274,7 @@ class ReturnOrderLineItemList(
|
||||
def download_queryset(self, queryset, export_format):
|
||||
"""Download the requested queryset as a file"""
|
||||
raise NotImplementedError(
|
||||
"download_queryset not yet implemented for this endpoint"
|
||||
'download_queryset not yet implemented for this endpoint'
|
||||
)
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
@ -1303,7 +1303,7 @@ class ReturnOrderExtraLineList(GeneralExtraLineList, ListCreateAPI):
|
||||
|
||||
def download_queryset(self, queryset, export_format):
|
||||
"""Download this queryset as a file"""
|
||||
raise NotImplementedError("download_queryset not yet implemented")
|
||||
raise NotImplementedError('download_queryset not yet implemented')
|
||||
|
||||
|
||||
class ReturnOrderExtraLineDetail(RetrieveUpdateDestroyAPI):
|
||||
@ -1339,9 +1339,9 @@ class OrderCalendarExport(ICalFeed):
|
||||
|
||||
instance_url = get_base_url()
|
||||
|
||||
instance_url = instance_url.replace("http://", "").replace("https://", "")
|
||||
instance_url = instance_url.replace('http://', '').replace('https://', '')
|
||||
timezone = settings.TIME_ZONE
|
||||
file_name = "calendar.ics"
|
||||
file_name = 'calendar.ics'
|
||||
|
||||
def __call__(self, request, *args, **kwargs):
|
||||
"""Overload call in order to check for authentication.
|
||||
@ -1367,8 +1367,8 @@ class OrderCalendarExport(ICalFeed):
|
||||
if len(auth) == 2:
|
||||
# NOTE: We are only support basic authentication for now.
|
||||
#
|
||||
if auth[0].lower() == "basic":
|
||||
uname, passwd = base64.b64decode(auth[1]).decode("ascii").split(':')
|
||||
if auth[0].lower() == 'basic':
|
||||
uname, passwd = base64.b64decode(auth[1]).decode('ascii').split(':')
|
||||
user = authenticate(username=uname, password=passwd)
|
||||
if user is not None:
|
||||
if user.is_active:
|
||||
@ -1383,7 +1383,7 @@ class OrderCalendarExport(ICalFeed):
|
||||
# Still nothing - return Unauth. header with info on how to authenticate
|
||||
# Information is needed by client, eg Thunderbird
|
||||
response = JsonResponse({
|
||||
"detail": "Authentication credentials were not provided."
|
||||
'detail': 'Authentication credentials were not provided.'
|
||||
})
|
||||
response['WWW-Authenticate'] = 'Basic realm="api"'
|
||||
response.status_code = 401
|
||||
@ -1402,11 +1402,11 @@ class OrderCalendarExport(ICalFeed):
|
||||
|
||||
def title(self, obj):
|
||||
"""Return calendar title."""
|
||||
if obj["ordertype"] == 'purchase-order':
|
||||
if obj['ordertype'] == 'purchase-order':
|
||||
ordertype_title = _('Purchase Order')
|
||||
elif obj["ordertype"] == 'sales-order':
|
||||
elif obj['ordertype'] == 'sales-order':
|
||||
ordertype_title = _('Sales Order')
|
||||
elif obj["ordertype"] == 'return-order':
|
||||
elif obj['ordertype'] == 'return-order':
|
||||
ordertype_title = _('Return Order')
|
||||
else:
|
||||
ordertype_title = _('Unknown')
|
||||
@ -1433,7 +1433,7 @@ class OrderCalendarExport(ICalFeed):
|
||||
).filter(status__lt=PurchaseOrderStatus.COMPLETE.value)
|
||||
else:
|
||||
outlist = models.PurchaseOrder.objects.filter(target_date__isnull=False)
|
||||
elif obj["ordertype"] == 'sales-order':
|
||||
elif obj['ordertype'] == 'sales-order':
|
||||
if obj['include_completed'] is False:
|
||||
# Do not include completed (=shipped) orders from list in this case
|
||||
# Shipped status = 20
|
||||
@ -1442,7 +1442,7 @@ class OrderCalendarExport(ICalFeed):
|
||||
).filter(status__lt=SalesOrderStatus.SHIPPED.value)
|
||||
else:
|
||||
outlist = models.SalesOrder.objects.filter(target_date__isnull=False)
|
||||
elif obj["ordertype"] == 'return-order':
|
||||
elif obj['ordertype'] == 'return-order':
|
||||
if obj['include_completed'] is False:
|
||||
# Do not include completed orders from list in this case
|
||||
# Complete status = 30
|
||||
@ -1458,11 +1458,11 @@ class OrderCalendarExport(ICalFeed):
|
||||
|
||||
def item_title(self, item):
|
||||
"""Set the event title to the order reference"""
|
||||
return f"{item.reference}"
|
||||
return f'{item.reference}'
|
||||
|
||||
def item_description(self, item):
|
||||
"""Set the event description"""
|
||||
return f"Company: {item.company.name}\nStatus: {item.get_status_display()}\nDescription: {item.description}"
|
||||
return f'Company: {item.company.name}\nStatus: {item.get_status_display()}\nDescription: {item.description}'
|
||||
|
||||
def item_start_datetime(self, item):
|
||||
"""Set event start to target date. Goal is all-day event."""
|
||||
|
@ -224,7 +224,7 @@ class Order(
|
||||
if self.company and self.contact:
|
||||
if self.contact.company != self.company:
|
||||
raise ValidationError({
|
||||
"contact": _("Contact does not match selected company")
|
||||
'contact': _('Contact does not match selected company')
|
||||
})
|
||||
|
||||
@classmethod
|
||||
@ -327,7 +327,7 @@ class Order(
|
||||
@classmethod
|
||||
def get_status_class(cls):
|
||||
"""Return the enumeration class which represents the 'status' field for this model"""
|
||||
raise NotImplementedError(f"get_status_class() not implemented for {__class__}")
|
||||
raise NotImplementedError(f'get_status_class() not implemented for {__class__}')
|
||||
|
||||
|
||||
class PurchaseOrder(TotalPriceMixin, Order):
|
||||
@ -454,7 +454,7 @@ class PurchaseOrder(TotalPriceMixin, Order):
|
||||
max_length=64,
|
||||
blank=True,
|
||||
verbose_name=_('Supplier Reference'),
|
||||
help_text=_("Supplier order reference code"),
|
||||
help_text=_('Supplier order reference code'),
|
||||
)
|
||||
|
||||
received_by = models.ForeignKey(
|
||||
@ -514,14 +514,14 @@ class PurchaseOrder(TotalPriceMixin, Order):
|
||||
quantity = int(quantity)
|
||||
if quantity <= 0:
|
||||
raise ValidationError({
|
||||
'quantity': _("Quantity must be greater than zero")
|
||||
'quantity': _('Quantity must be greater than zero')
|
||||
})
|
||||
except ValueError:
|
||||
raise ValidationError({'quantity': _("Invalid quantity provided")})
|
||||
raise ValidationError({'quantity': _('Invalid quantity provided')})
|
||||
|
||||
if supplier_part.supplier != self.supplier:
|
||||
raise ValidationError({
|
||||
'supplier': _("Part supplier must match PO supplier")
|
||||
'supplier': _('Part supplier must match PO supplier')
|
||||
})
|
||||
|
||||
if group:
|
||||
@ -715,11 +715,11 @@ class PurchaseOrder(TotalPriceMixin, Order):
|
||||
try:
|
||||
if quantity < 0:
|
||||
raise ValidationError({
|
||||
"quantity": _("Quantity must be a positive number")
|
||||
'quantity': _('Quantity must be a positive number')
|
||||
})
|
||||
quantity = InvenTree.helpers.clean_decimal(quantity)
|
||||
except TypeError:
|
||||
raise ValidationError({"quantity": _("Invalid quantity provided")})
|
||||
raise ValidationError({'quantity': _('Invalid quantity provided')})
|
||||
|
||||
# Create a new stock item
|
||||
if line.part and quantity > 0:
|
||||
@ -882,7 +882,7 @@ class SalesOrder(TotalPriceMixin, Order):
|
||||
limit_choices_to={'is_customer': True},
|
||||
related_name='return_orders',
|
||||
verbose_name=_('Customer'),
|
||||
help_text=_("Company to which the items are being sold"),
|
||||
help_text=_('Company to which the items are being sold'),
|
||||
)
|
||||
|
||||
@property
|
||||
@ -906,7 +906,7 @@ class SalesOrder(TotalPriceMixin, Order):
|
||||
max_length=64,
|
||||
blank=True,
|
||||
verbose_name=_('Customer Reference '),
|
||||
help_text=_("Customer order reference code"),
|
||||
help_text=_('Customer order reference code'),
|
||||
)
|
||||
|
||||
shipment_date = models.DateField(
|
||||
@ -979,12 +979,12 @@ class SalesOrder(TotalPriceMixin, Order):
|
||||
|
||||
elif self.pending_shipment_count > 0:
|
||||
raise ValidationError(
|
||||
_("Order cannot be completed as there are incomplete shipments")
|
||||
_('Order cannot be completed as there are incomplete shipments')
|
||||
)
|
||||
|
||||
elif not allow_incomplete_lines and self.pending_line_count > 0:
|
||||
raise ValidationError(
|
||||
_("Order cannot be completed as there are incomplete line items")
|
||||
_('Order cannot be completed as there are incomplete line items')
|
||||
)
|
||||
|
||||
except ValidationError as e:
|
||||
@ -1174,10 +1174,10 @@ class PurchaseOrderAttachment(InvenTreeAttachment):
|
||||
|
||||
def getSubdir(self):
|
||||
"""Return the directory path where PurchaseOrderAttachment files are located"""
|
||||
return os.path.join("po_files", str(self.order.id))
|
||||
return os.path.join('po_files', str(self.order.id))
|
||||
|
||||
order = models.ForeignKey(
|
||||
PurchaseOrder, on_delete=models.CASCADE, related_name="attachments"
|
||||
PurchaseOrder, on_delete=models.CASCADE, related_name='attachments'
|
||||
)
|
||||
|
||||
|
||||
@ -1191,7 +1191,7 @@ class SalesOrderAttachment(InvenTreeAttachment):
|
||||
|
||||
def getSubdir(self):
|
||||
"""Return the directory path where SalesOrderAttachment files are located"""
|
||||
return os.path.join("so_files", str(self.order.id))
|
||||
return os.path.join('so_files', str(self.order.id))
|
||||
|
||||
order = models.ForeignKey(
|
||||
SalesOrder, on_delete=models.CASCADE, related_name='attachments'
|
||||
@ -1342,7 +1342,7 @@ class PurchaseOrderLineItem(OrderLineItem):
|
||||
|
||||
def __str__(self):
|
||||
"""Render a string representation of a PurchaseOrderLineItem instance"""
|
||||
return "{n} x {part} from {supplier} (for {po})".format(
|
||||
return '{n} x {part} from {supplier} (for {po})'.format(
|
||||
n=decimal2string(self.quantity),
|
||||
part=self.part.SKU if self.part else 'unknown part',
|
||||
supplier=self.order.supplier.name if self.order.supplier else _('deleted'),
|
||||
@ -1373,7 +1373,7 @@ class PurchaseOrderLineItem(OrderLineItem):
|
||||
null=True,
|
||||
related_name='purchase_order_line_items',
|
||||
verbose_name=_('Part'),
|
||||
help_text=_("Supplier part"),
|
||||
help_text=_('Supplier part'),
|
||||
)
|
||||
|
||||
received = models.DecimalField(
|
||||
@ -1483,12 +1483,12 @@ class SalesOrderLineItem(OrderLineItem):
|
||||
if self.part:
|
||||
if self.part.virtual:
|
||||
raise ValidationError({
|
||||
'part': _("Virtual part cannot be assigned to a sales order")
|
||||
'part': _('Virtual part cannot be assigned to a sales order')
|
||||
})
|
||||
|
||||
if not self.part.salable:
|
||||
raise ValidationError({
|
||||
'part': _("Only salable parts can be assigned to a sales order")
|
||||
'part': _('Only salable parts can be assigned to a sales order')
|
||||
})
|
||||
|
||||
order = models.ForeignKey(
|
||||
@ -1668,10 +1668,10 @@ class SalesOrderShipment(InvenTreeNotesMixin, MetadataMixin, models.Model):
|
||||
try:
|
||||
if self.shipment_date:
|
||||
# Shipment has already been sent!
|
||||
raise ValidationError(_("Shipment has already been sent"))
|
||||
raise ValidationError(_('Shipment has already been sent'))
|
||||
|
||||
if self.allocations.count() == 0:
|
||||
raise ValidationError(_("Shipment has no allocated stock items"))
|
||||
raise ValidationError(_('Shipment has no allocated stock items'))
|
||||
|
||||
except ValidationError as e:
|
||||
if raise_error:
|
||||
@ -1807,7 +1807,7 @@ class SalesOrderAllocation(models.Model):
|
||||
# Ensure that we do not 'over allocate' a stock item
|
||||
build_allocation_count = self.item.build_allocation_count()
|
||||
sales_allocation_count = self.item.sales_order_allocation_count(
|
||||
exclude_allocations={"pk": self.pk}
|
||||
exclude_allocations={'pk': self.pk}
|
||||
)
|
||||
|
||||
total_allocation = (
|
||||
@ -1954,7 +1954,7 @@ class ReturnOrder(TotalPriceMixin, Order):
|
||||
limit_choices_to={'is_customer': True},
|
||||
related_name='sales_orders',
|
||||
verbose_name=_('Customer'),
|
||||
help_text=_("Company from which items are being returned"),
|
||||
help_text=_('Company from which items are being returned'),
|
||||
)
|
||||
|
||||
@property
|
||||
@ -1973,7 +1973,7 @@ class ReturnOrder(TotalPriceMixin, Order):
|
||||
max_length=64,
|
||||
blank=True,
|
||||
verbose_name=_('Customer Reference '),
|
||||
help_text=_("Customer order reference code"),
|
||||
help_text=_('Customer order reference code'),
|
||||
)
|
||||
|
||||
issue_date = models.DateField(
|
||||
@ -2078,7 +2078,7 @@ class ReturnOrder(TotalPriceMixin, Order):
|
||||
"""
|
||||
# Prevent an item from being "received" multiple times
|
||||
if line.received_date is not None:
|
||||
logger.warning("receive_line_item called with item already returned")
|
||||
logger.warning('receive_line_item called with item already returned')
|
||||
return
|
||||
|
||||
stock_item = line.item
|
||||
@ -2144,7 +2144,7 @@ class ReturnOrderLineItem(OrderLineItem):
|
||||
|
||||
if self.item and not self.item.serialized:
|
||||
raise ValidationError({
|
||||
'item': _("Only serialized items can be assigned to a Return Order")
|
||||
'item': _('Only serialized items can be assigned to a Return Order')
|
||||
})
|
||||
|
||||
order = models.ForeignKey(
|
||||
|
@ -261,7 +261,7 @@ class PurchaseOrderCancelSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
|
||||
if not order.can_cancel:
|
||||
raise ValidationError(_("Order cannot be cancelled"))
|
||||
raise ValidationError(_('Order cannot be cancelled'))
|
||||
|
||||
order.cancel_order()
|
||||
|
||||
@ -286,7 +286,7 @@ class PurchaseOrderCompleteSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
|
||||
if not value and not order.is_complete:
|
||||
raise ValidationError(_("Order has incomplete line items"))
|
||||
raise ValidationError(_('Order has incomplete line items'))
|
||||
|
||||
return value
|
||||
|
||||
@ -390,7 +390,7 @@ class PurchaseOrderLineItemSerializer(InvenTreeModelSerializer):
|
||||
def validate_quantity(self, quantity):
|
||||
"""Validation for the 'quantity' field"""
|
||||
if quantity <= 0:
|
||||
raise ValidationError(_("Quantity must be greater than zero"))
|
||||
raise ValidationError(_('Quantity must be greater than zero'))
|
||||
|
||||
return quantity
|
||||
|
||||
@ -517,7 +517,7 @@ class PurchaseOrderLineItemReceiveSerializer(serializers.Serializer):
|
||||
def validate_quantity(self, quantity):
|
||||
"""Validation for the 'quantity' field"""
|
||||
if quantity <= 0:
|
||||
raise ValidationError(_("Quantity must be greater than zero"))
|
||||
raise ValidationError(_('Quantity must be greater than zero'))
|
||||
|
||||
return quantity
|
||||
|
||||
@ -647,7 +647,7 @@ class PurchaseOrderReceiveSerializer(serializers.Serializer):
|
||||
|
||||
if not item['location']:
|
||||
raise ValidationError({
|
||||
'location': _("Destination location must be specified")
|
||||
'location': _('Destination location must be specified')
|
||||
})
|
||||
|
||||
# Ensure barcodes are unique
|
||||
@ -1075,7 +1075,7 @@ class SalesOrderShipmentCompleteSerializer(serializers.ModelSerializer):
|
||||
shipment = self.context.get('shipment', None)
|
||||
|
||||
if not shipment:
|
||||
raise ValidationError(_("No shipment details provided"))
|
||||
raise ValidationError(_('No shipment details provided'))
|
||||
|
||||
shipment.check_can_complete(raise_error=True)
|
||||
|
||||
@ -1135,7 +1135,7 @@ class SalesOrderShipmentAllocationItemSerializer(serializers.Serializer):
|
||||
|
||||
# Ensure that the line item points to the correct order
|
||||
if line_item.order != order:
|
||||
raise ValidationError(_("Line item is not associated with this order"))
|
||||
raise ValidationError(_('Line item is not associated with this order'))
|
||||
|
||||
return line_item
|
||||
|
||||
@ -1154,7 +1154,7 @@ class SalesOrderShipmentAllocationItemSerializer(serializers.Serializer):
|
||||
def validate_quantity(self, quantity):
|
||||
"""Custom validation for the 'quantity' field"""
|
||||
if quantity <= 0:
|
||||
raise ValidationError(_("Quantity must be positive"))
|
||||
raise ValidationError(_('Quantity must be positive'))
|
||||
|
||||
return quantity
|
||||
|
||||
@ -1171,13 +1171,13 @@ class SalesOrderShipmentAllocationItemSerializer(serializers.Serializer):
|
||||
|
||||
if stock_item.serialized and quantity != 1:
|
||||
raise ValidationError({
|
||||
'quantity': _("Quantity must be 1 for serialized stock item")
|
||||
'quantity': _('Quantity must be 1 for serialized stock item')
|
||||
})
|
||||
|
||||
q = normalize(stock_item.unallocated_quantity())
|
||||
|
||||
if quantity > q:
|
||||
raise ValidationError({'quantity': _(f"Available quantity ({q}) exceeded")})
|
||||
raise ValidationError({'quantity': _(f'Available quantity ({q}) exceeded')})
|
||||
|
||||
return data
|
||||
|
||||
@ -1197,7 +1197,7 @@ class SalesOrderCompleteSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
|
||||
if not value and not order.is_completed():
|
||||
raise ValidationError(_("Order has incomplete line items"))
|
||||
raise ValidationError(_('Order has incomplete line items'))
|
||||
|
||||
return value
|
||||
|
||||
@ -1274,7 +1274,7 @@ class SalesOrderSerialAllocationSerializer(serializers.Serializer):
|
||||
|
||||
# Ensure that the line item points to the correct order
|
||||
if line_item.order != order:
|
||||
raise ValidationError(_("Line item is not associated with this order"))
|
||||
raise ValidationError(_('Line item is not associated with this order'))
|
||||
|
||||
return line_item
|
||||
|
||||
@ -1283,8 +1283,8 @@ class SalesOrderSerialAllocationSerializer(serializers.Serializer):
|
||||
)
|
||||
|
||||
serial_numbers = serializers.CharField(
|
||||
label=_("Serial Numbers"),
|
||||
help_text=_("Enter serial numbers to allocate"),
|
||||
label=_('Serial Numbers'),
|
||||
help_text=_('Enter serial numbers to allocate'),
|
||||
required=True,
|
||||
allow_blank=False,
|
||||
)
|
||||
@ -1306,10 +1306,10 @@ class SalesOrderSerialAllocationSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
|
||||
if shipment.shipment_date is not None:
|
||||
raise ValidationError(_("Shipment has already been shipped"))
|
||||
raise ValidationError(_('Shipment has already been shipped'))
|
||||
|
||||
if shipment.order != order:
|
||||
raise ValidationError(_("Shipment is not associated with this order"))
|
||||
raise ValidationError(_('Shipment is not associated with this order'))
|
||||
|
||||
return shipment
|
||||
|
||||
@ -1356,16 +1356,16 @@ class SalesOrderSerialAllocationSerializer(serializers.Serializer):
|
||||
serials_allocated.append(str(serial))
|
||||
|
||||
if len(serials_not_exist) > 0:
|
||||
error_msg = _("No match found for the following serial numbers")
|
||||
error_msg += ": "
|
||||
error_msg += ",".join(serials_not_exist)
|
||||
error_msg = _('No match found for the following serial numbers')
|
||||
error_msg += ': '
|
||||
error_msg += ','.join(serials_not_exist)
|
||||
|
||||
raise ValidationError({'serial_numbers': error_msg})
|
||||
|
||||
if len(serials_allocated) > 0:
|
||||
error_msg = _("The following serial numbers are already allocated")
|
||||
error_msg += ": "
|
||||
error_msg += ",".join(serials_allocated)
|
||||
error_msg = _('The following serial numbers are already allocated')
|
||||
error_msg += ': '
|
||||
error_msg += ','.join(serials_allocated)
|
||||
|
||||
raise ValidationError({'serial_numbers': error_msg})
|
||||
|
||||
@ -1412,10 +1412,10 @@ class SalesOrderShipmentAllocationSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
|
||||
if shipment.shipment_date is not None:
|
||||
raise ValidationError(_("Shipment has already been shipped"))
|
||||
raise ValidationError(_('Shipment has already been shipped'))
|
||||
|
||||
if shipment.order != order:
|
||||
raise ValidationError(_("Shipment is not associated with this order"))
|
||||
raise ValidationError(_('Shipment is not associated with this order'))
|
||||
|
||||
return shipment
|
||||
|
||||
@ -1596,10 +1596,10 @@ class ReturnOrderLineItemReceiveSerializer(serializers.Serializer):
|
||||
def validate_line_item(self, item):
|
||||
"""Validation for a single line item"""
|
||||
if item.order != self.context['order']:
|
||||
raise ValidationError(_("Line item does not match return order"))
|
||||
raise ValidationError(_('Line item does not match return order'))
|
||||
|
||||
if item.received:
|
||||
raise ValidationError(_("Line item has already been received"))
|
||||
raise ValidationError(_('Line item has already been received'))
|
||||
|
||||
return item
|
||||
|
||||
@ -1628,7 +1628,7 @@ class ReturnOrderReceiveSerializer(serializers.Serializer):
|
||||
order = self.context['order']
|
||||
if order.status != ReturnOrderStatus.IN_PROGRESS:
|
||||
raise ValidationError(
|
||||
_("Items can only be received against orders which are in progress")
|
||||
_('Items can only be received against orders which are in progress')
|
||||
)
|
||||
|
||||
data = super().validate(data)
|
||||
@ -1636,7 +1636,7 @@ class ReturnOrderReceiveSerializer(serializers.Serializer):
|
||||
items = data.get('items', [])
|
||||
|
||||
if len(items) == 0:
|
||||
raise ValidationError(_("Line items must be provided"))
|
||||
raise ValidationError(_('Line items must be provided'))
|
||||
|
||||
return data
|
||||
|
||||
|
@ -76,7 +76,7 @@ def notify_overdue_sales_order(so: order.models.SalesOrder):
|
||||
context = {
|
||||
'order': so,
|
||||
'name': name,
|
||||
'message': _(f"Sales order {so} is now overdue"),
|
||||
'message': _(f'Sales order {so} is now overdue'),
|
||||
'link': InvenTree.helpers_model.construct_absolute_url(so.get_absolute_url()),
|
||||
'template': {'html': 'email/overdue_sales_order.html', 'subject': name},
|
||||
}
|
||||
|
@ -237,7 +237,7 @@ class PurchaseOrderTest(OrderTest):
|
||||
self.assignRole('purchase_order.add')
|
||||
|
||||
url = reverse('api-po-list')
|
||||
huge_number = "PO-92233720368547758089999999999999999"
|
||||
huge_number = 'PO-92233720368547758089999999999999999'
|
||||
|
||||
response = self.post(
|
||||
url,
|
||||
@ -333,7 +333,7 @@ class PurchaseOrderTest(OrderTest):
|
||||
response = self.delete(url, expected_code=403)
|
||||
|
||||
# Now, add the "delete" permission!
|
||||
self.assignRole("purchase_order.delete")
|
||||
self.assignRole('purchase_order.delete')
|
||||
|
||||
response = self.delete(url, expected_code=204)
|
||||
|
||||
@ -589,7 +589,7 @@ class PurchaseOrderTest(OrderTest):
|
||||
|
||||
resp_dict = response.json()
|
||||
self.assertEqual(
|
||||
resp_dict['detail'], "Authentication credentials were not provided."
|
||||
resp_dict['detail'], 'Authentication credentials were not provided.'
|
||||
)
|
||||
|
||||
def test_po_calendar_auth(self):
|
||||
@ -731,7 +731,7 @@ class PurchaseOrderReceiveTest(OrderTest):
|
||||
def test_no_items(self):
|
||||
"""Test with an empty list of items."""
|
||||
data = self.post(
|
||||
self.url, {"items": [], "location": None}, expected_code=400
|
||||
self.url, {'items': [], 'location': None}, expected_code=400
|
||||
).data
|
||||
|
||||
self.assertIn('Line items must be provided', str(data))
|
||||
@ -743,14 +743,14 @@ class PurchaseOrderReceiveTest(OrderTest):
|
||||
"""Test than errors are returned as expected for invalid data."""
|
||||
data = self.post(
|
||||
self.url,
|
||||
{"items": [{"line_item": 12345, "location": 12345}]},
|
||||
{'items': [{'line_item': 12345, 'location': 12345}]},
|
||||
expected_code=400,
|
||||
).data
|
||||
|
||||
items = data['items'][0]
|
||||
|
||||
self.assertIn('Invalid pk "12345"', str(items['line_item']))
|
||||
self.assertIn("object does not exist", str(items['location']))
|
||||
self.assertIn('object does not exist', str(items['location']))
|
||||
|
||||
# No new stock items have been created
|
||||
self.assertEqual(self.n, StockItem.objects.count())
|
||||
@ -760,8 +760,8 @@ class PurchaseOrderReceiveTest(OrderTest):
|
||||
data = self.post(
|
||||
self.url,
|
||||
{
|
||||
"items": [
|
||||
{"line_item": 22, "location": 1, "status": 99999, "quantity": 5}
|
||||
'items': [
|
||||
{'line_item': 22, 'location': 1, 'status': 99999, 'quantity': 5}
|
||||
]
|
||||
},
|
||||
expected_code=400,
|
||||
@ -1330,7 +1330,7 @@ class SalesOrderTest(OrderTest):
|
||||
{'export': fmt},
|
||||
decode=True if fmt == 'csv' else False,
|
||||
expected_code=200,
|
||||
expected_fn=f"InvenTree_SalesOrders.{fmt}",
|
||||
expected_fn=f'InvenTree_SalesOrders.{fmt}',
|
||||
)
|
||||
|
||||
|
||||
@ -1357,7 +1357,7 @@ class SalesOrderLineItemTest(OrderTest):
|
||||
order=so,
|
||||
part=part,
|
||||
quantity=(idx + 1) * 5,
|
||||
reference=f"Order {so.reference} - line {idx}",
|
||||
reference=f'Order {so.reference} - line {idx}',
|
||||
)
|
||||
)
|
||||
|
||||
@ -1376,7 +1376,7 @@ class SalesOrderLineItemTest(OrderTest):
|
||||
self.assertEqual(len(response.data), n)
|
||||
|
||||
# List *all* lines, but paginate
|
||||
response = self.get(self.url, {"limit": 5}, expected_code=200)
|
||||
response = self.get(self.url, {'limit': 5}, expected_code=200)
|
||||
|
||||
self.assertEqual(response.data['count'], n)
|
||||
self.assertEqual(len(response.data['results']), 5)
|
||||
@ -1530,9 +1530,9 @@ class SalesOrderAllocateTest(OrderTest):
|
||||
data = {
|
||||
'items': [
|
||||
{
|
||||
"line_item": line.pk,
|
||||
"stock_item": part.stock_items.last().pk,
|
||||
"quantity": 0,
|
||||
'line_item': line.pk,
|
||||
'stock_item': part.stock_items.last().pk,
|
||||
'quantity': 0,
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -1576,16 +1576,16 @@ class SalesOrderAllocateTest(OrderTest):
|
||||
# First, check that there are no line items allocated against this SalesOrder
|
||||
self.assertEqual(self.order.stock_allocations.count(), 0)
|
||||
|
||||
data = {"items": [], "shipment": self.shipment.pk}
|
||||
data = {'items': [], 'shipment': self.shipment.pk}
|
||||
|
||||
for line in self.order.lines.all():
|
||||
stock_item = line.part.stock_items.last()
|
||||
|
||||
# Fully-allocate each line
|
||||
data['items'].append({
|
||||
"line_item": line.pk,
|
||||
"stock_item": stock_item.pk,
|
||||
"quantity": 5,
|
||||
'line_item': line.pk,
|
||||
'stock_item': stock_item.pk,
|
||||
'quantity': 5,
|
||||
})
|
||||
|
||||
self.post(self.url, data, expected_code=201)
|
||||
@ -1603,7 +1603,7 @@ class SalesOrderAllocateTest(OrderTest):
|
||||
# First, check that there are no line items allocated against this SalesOrder
|
||||
self.assertEqual(self.order.stock_allocations.count(), 0)
|
||||
|
||||
data = {"items": [], "shipment": self.shipment.pk}
|
||||
data = {'items': [], 'shipment': self.shipment.pk}
|
||||
|
||||
def check_template(line_item):
|
||||
return line_item.part.is_template
|
||||
@ -1619,9 +1619,9 @@ class SalesOrderAllocateTest(OrderTest):
|
||||
|
||||
# Fully-allocate each line
|
||||
data['items'].append({
|
||||
"line_item": line.pk,
|
||||
"stock_item": stock_item.pk,
|
||||
"quantity": 5,
|
||||
'line_item': line.pk,
|
||||
'stock_item': stock_item.pk,
|
||||
'quantity': 5,
|
||||
})
|
||||
|
||||
self.post(self.url, data, expected_code=201)
|
||||
@ -1719,8 +1719,8 @@ class SalesOrderAllocateTest(OrderTest):
|
||||
url,
|
||||
{
|
||||
'order': order.pk,
|
||||
'reference': f"SH{idx + 1}",
|
||||
'tracking_number': f"TRK_{order.pk}_{idx}",
|
||||
'reference': f'SH{idx + 1}',
|
||||
'tracking_number': f'TRK_{order.pk}_{idx}',
|
||||
},
|
||||
expected_code=201,
|
||||
)
|
||||
@ -1932,7 +1932,7 @@ class ReturnOrderTests(InvenTreeAPITestCase):
|
||||
# Issue the order (via the API)
|
||||
self.assertIsNone(rma.issue_date)
|
||||
self.post(
|
||||
reverse("api-return-order-issue", kwargs={"pk": rma.pk}), expected_code=201
|
||||
reverse('api-return-order-issue', kwargs={'pk': rma.pk}), expected_code=201
|
||||
)
|
||||
|
||||
rma.refresh_from_db()
|
||||
|
@ -30,8 +30,8 @@ class TestRefIntMigrations(MigratorTestCase):
|
||||
for ii in range(10):
|
||||
order = PurchaseOrder.objects.create(
|
||||
supplier=supplier,
|
||||
reference=f"{ii}-abcde",
|
||||
description="Just a test order",
|
||||
reference=f'{ii}-abcde',
|
||||
description='Just a test order',
|
||||
)
|
||||
|
||||
# Initially, the 'reference_int' field is unavailable
|
||||
@ -40,8 +40,8 @@ class TestRefIntMigrations(MigratorTestCase):
|
||||
|
||||
sales_order = SalesOrder.objects.create(
|
||||
customer=supplier,
|
||||
reference=f"{ii}-xyz",
|
||||
description="A test sales order",
|
||||
reference=f'{ii}-xyz',
|
||||
description='A test sales order',
|
||||
)
|
||||
|
||||
# Initially, the 'reference_int' field is unavailable
|
||||
@ -67,8 +67,8 @@ class TestRefIntMigrations(MigratorTestCase):
|
||||
SalesOrder = self.new_state.apps.get_model('order', 'salesorder')
|
||||
|
||||
for ii in range(10):
|
||||
po = PurchaseOrder.objects.get(reference=f"{ii}-abcde")
|
||||
so = SalesOrder.objects.get(reference=f"{ii}-xyz")
|
||||
po = PurchaseOrder.objects.get(reference=f'{ii}-abcde')
|
||||
so = SalesOrder.objects.get(reference=f'{ii}-xyz')
|
||||
|
||||
# The integer reference field must have been correctly updated
|
||||
self.assertEqual(po.reference_int, ii)
|
||||
@ -166,8 +166,8 @@ class TestAdditionalLineMigration(MigratorTestCase):
|
||||
for ii in range(10):
|
||||
order = PurchaseOrder.objects.create(
|
||||
supplier=supplier,
|
||||
reference=f"{ii}-abcde",
|
||||
description="Just a test order",
|
||||
reference=f'{ii}-abcde',
|
||||
description='Just a test order',
|
||||
)
|
||||
order.lines.create(part=supplierpart, quantity=12, received=1)
|
||||
order.lines.create(quantity=12, received=1)
|
||||
@ -188,7 +188,7 @@ class TestAdditionalLineMigration(MigratorTestCase):
|
||||
"""Test that the the PO lines where converted correctly."""
|
||||
PurchaseOrder = self.new_state.apps.get_model('order', 'purchaseorder')
|
||||
for ii in range(10):
|
||||
po = PurchaseOrder.objects.get(reference=f"{ii}-abcde")
|
||||
po = PurchaseOrder.objects.get(reference=f'{ii}-abcde')
|
||||
self.assertEqual(po.extra_lines.count(), 1)
|
||||
self.assertEqual(po.lines.count(), 1)
|
||||
|
||||
|
@ -33,7 +33,7 @@ class SalesOrderTest(TestCase):
|
||||
"""Initial setup for this set of unit tests"""
|
||||
# Create a Company to ship the goods to
|
||||
cls.customer = Company.objects.create(
|
||||
name="ABC Co", description="My customer", is_customer=True
|
||||
name='ABC Co', description='My customer', is_customer=True
|
||||
)
|
||||
|
||||
# Create a Part to ship
|
||||
@ -72,7 +72,7 @@ class SalesOrderTest(TestCase):
|
||||
|
||||
# Create an extra line
|
||||
cls.extraline = SalesOrderExtraLine.objects.create(
|
||||
quantity=1, order=cls.order, reference="Extra line"
|
||||
quantity=1, order=cls.order, reference='Extra line'
|
||||
)
|
||||
|
||||
def test_so_reference(self):
|
||||
|
@ -46,7 +46,7 @@ class OrderTest(TestCase):
|
||||
self.assertEqual(order.reference, f'PO-{pk:04d}')
|
||||
|
||||
line = PurchaseOrderLineItem.objects.get(pk=1)
|
||||
self.assertEqual(str(line), "100 x ACME0001 from ACME (for PO-0001 - ACME)")
|
||||
self.assertEqual(str(line), '100 x ACME0001 from ACME (for PO-0001 - ACME)')
|
||||
|
||||
def test_rebuild_reference(self):
|
||||
"""Test that the reference_int field is correctly updated when the model is saved"""
|
||||
@ -236,7 +236,7 @@ class OrderTest(TestCase):
|
||||
|
||||
# Create a new PurchaseOrder
|
||||
po = PurchaseOrder.objects.create(
|
||||
supplier=sup, reference=f"PO-{n + 1}", description='Some PO'
|
||||
supplier=sup, reference=f'PO-{n + 1}', description='Some PO'
|
||||
)
|
||||
|
||||
# Add line items
|
||||
|
@ -32,7 +32,7 @@ from .models import (
|
||||
SalesOrderLineItem,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class PurchaseOrderIndex(InvenTreeRoleMixin, ListView):
|
||||
@ -115,9 +115,9 @@ class PurchaseOrderUpload(FileManagementFormView):
|
||||
'order/order_wizard/match_parts.html',
|
||||
]
|
||||
form_steps_description = [
|
||||
_("Upload File"),
|
||||
_("Match Fields"),
|
||||
_("Match Supplier Parts"),
|
||||
_('Upload File'),
|
||||
_('Match Fields'),
|
||||
_('Match Supplier Parts'),
|
||||
]
|
||||
form_field_map = {
|
||||
'item_select': 'part',
|
||||
@ -294,7 +294,7 @@ class SalesOrderExport(AjaxView):
|
||||
|
||||
export_format = request.GET.get('format', 'csv')
|
||||
|
||||
filename = f"{str(order)} - {order.customer.name}.{export_format}"
|
||||
filename = f'{str(order)} - {order.customer.name}.{export_format}'
|
||||
|
||||
dataset = SalesOrderLineItemResource().export(queryset=order.lines.all())
|
||||
|
||||
|
@ -114,7 +114,7 @@ class CategoryList(CategoryMixin, APIDownloadMixin, ListCreateAPI):
|
||||
"""Download the filtered queryset as a data file"""
|
||||
dataset = PartCategoryResource().export(queryset=queryset)
|
||||
filedata = dataset.export(export_format)
|
||||
filename = f"InvenTree_Categories.{export_format}"
|
||||
filename = f'InvenTree_Categories.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -682,23 +682,23 @@ class PartRequirements(RetrieveAPI):
|
||||
part = self.get_object()
|
||||
|
||||
data = {
|
||||
"available_stock": part.available_stock,
|
||||
"on_order": part.on_order,
|
||||
"required_build_order_quantity": part.required_build_order_quantity(),
|
||||
"allocated_build_order_quantity": part.build_order_allocation_count(),
|
||||
"required_sales_order_quantity": part.required_sales_order_quantity(),
|
||||
"allocated_sales_order_quantity": part.sales_order_allocation_count(
|
||||
'available_stock': part.available_stock,
|
||||
'on_order': part.on_order,
|
||||
'required_build_order_quantity': part.required_build_order_quantity(),
|
||||
'allocated_build_order_quantity': part.build_order_allocation_count(),
|
||||
'required_sales_order_quantity': part.required_sales_order_quantity(),
|
||||
'allocated_sales_order_quantity': part.sales_order_allocation_count(
|
||||
pending=True
|
||||
),
|
||||
}
|
||||
|
||||
data["allocated"] = (
|
||||
data["allocated_build_order_quantity"]
|
||||
+ data["allocated_sales_order_quantity"]
|
||||
data['allocated'] = (
|
||||
data['allocated_build_order_quantity']
|
||||
+ data['allocated_sales_order_quantity']
|
||||
)
|
||||
data["required"] = (
|
||||
data["required_build_order_quantity"]
|
||||
+ data["required_sales_order_quantity"]
|
||||
data['required'] = (
|
||||
data['required_build_order_quantity']
|
||||
+ data['required_sales_order_quantity']
|
||||
)
|
||||
|
||||
return Response(data)
|
||||
@ -850,7 +850,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
IPN = rest_filters.CharFilter(
|
||||
label='Filter by exact IPN (internal part number)',
|
||||
field_name='IPN',
|
||||
lookup_expr="iexact",
|
||||
lookup_expr='iexact',
|
||||
)
|
||||
|
||||
# Regex match for IPN
|
||||
@ -895,7 +895,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
return queryset.filter(Q(unallocated_stock__lte=0))
|
||||
|
||||
convert_from = rest_filters.ModelChoiceFilter(
|
||||
label="Can convert from",
|
||||
label='Can convert from',
|
||||
queryset=Part.objects.all(),
|
||||
method='filter_convert_from',
|
||||
)
|
||||
@ -909,7 +909,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
return queryset
|
||||
|
||||
exclude_tree = rest_filters.ModelChoiceFilter(
|
||||
label="Exclude Part tree",
|
||||
label='Exclude Part tree',
|
||||
queryset=Part.objects.all(),
|
||||
method='filter_exclude_tree',
|
||||
)
|
||||
@ -947,7 +947,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
return queryset.filter(id__in=[p.pk for p in bom_parts])
|
||||
|
||||
has_pricing = rest_filters.BooleanFilter(
|
||||
label="Has Pricing", method="filter_has_pricing"
|
||||
label='Has Pricing', method='filter_has_pricing'
|
||||
)
|
||||
|
||||
def filter_has_pricing(self, queryset, name, value):
|
||||
@ -961,7 +961,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
return queryset.filter(q_a | q_b).distinct()
|
||||
|
||||
stocktake = rest_filters.BooleanFilter(
|
||||
label="Has stocktake", method='filter_has_stocktake'
|
||||
label='Has stocktake', method='filter_has_stocktake'
|
||||
)
|
||||
|
||||
def filter_has_stocktake(self, queryset, name, value):
|
||||
@ -997,7 +997,7 @@ class PartFilter(rest_filters.FilterSet):
|
||||
return queryset.exclude(Q(in_stock=0) & ~Q(stock_item_count=0))
|
||||
|
||||
default_location = rest_filters.ModelChoiceFilter(
|
||||
label="Default Location", queryset=StockLocation.objects.all()
|
||||
label='Default Location', queryset=StockLocation.objects.all()
|
||||
)
|
||||
|
||||
is_template = rest_filters.BooleanFilter()
|
||||
@ -1095,7 +1095,7 @@ class PartList(PartMixin, APIDownloadMixin, ListCreateAPI):
|
||||
dataset = PartResource().export(queryset=queryset)
|
||||
|
||||
filedata = dataset.export(export_format)
|
||||
filename = f"InvenTree_Parts.{export_format}"
|
||||
filename = f'InvenTree_Parts.{export_format}'
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
|
||||
@ -1668,7 +1668,7 @@ class BomFilter(rest_filters.FilterSet):
|
||||
)
|
||||
|
||||
available_stock = rest_filters.BooleanFilter(
|
||||
label="Has available stock", method="filter_available_stock"
|
||||
label='Has available stock', method='filter_available_stock'
|
||||
)
|
||||
|
||||
def filter_available_stock(self, queryset, name, value):
|
||||
@ -1677,7 +1677,7 @@ class BomFilter(rest_filters.FilterSet):
|
||||
return queryset.filter(available_stock__gt=0)
|
||||
return queryset.filter(available_stock=0)
|
||||
|
||||
on_order = rest_filters.BooleanFilter(label="On order", method="filter_on_order")
|
||||
on_order = rest_filters.BooleanFilter(label='On order', method='filter_on_order')
|
||||
|
||||
def filter_on_order(self, queryset, name, value):
|
||||
"""Filter the queryset based on whether each line item has any stock on order"""
|
||||
@ -1686,7 +1686,7 @@ class BomFilter(rest_filters.FilterSet):
|
||||
return queryset.filter(on_order=0)
|
||||
|
||||
has_pricing = rest_filters.BooleanFilter(
|
||||
label="Has Pricing", method="filter_has_pricing"
|
||||
label='Has Pricing', method='filter_has_pricing'
|
||||
)
|
||||
|
||||
def filter_has_pricing(self, queryset, name, value):
|
||||
|
@ -12,7 +12,7 @@ from InvenTree.ready import (
|
||||
isPluginRegistryLoaded,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class PartConfig(AppConfig):
|
||||
@ -67,11 +67,11 @@ class PartConfig(AppConfig):
|
||||
if items.count() > 0:
|
||||
# Find any pricing objects which have the 'scheduled_for_update' flag set
|
||||
logger.info(
|
||||
"Resetting update flags for %s pricing objects...", items.count()
|
||||
'Resetting update flags for %s pricing objects...', items.count()
|
||||
)
|
||||
|
||||
for pricing in items:
|
||||
pricing.scheduled_for_update = False
|
||||
pricing.save()
|
||||
except Exception:
|
||||
logger.exception("Failed to reset pricing flags - database not ready")
|
||||
logger.exception('Failed to reset pricing flags - database not ready')
|
||||
|
@ -269,14 +269,14 @@ def ExportBom(
|
||||
|
||||
# Generate column names for this supplier
|
||||
k_sup = (
|
||||
str(_("Supplier"))
|
||||
+ "_"
|
||||
str(_('Supplier'))
|
||||
+ '_'
|
||||
+ str(mp_idx)
|
||||
+ "_"
|
||||
+ '_'
|
||||
+ str(sp_idx)
|
||||
)
|
||||
k_sku = (
|
||||
str(_("SKU")) + "_" + str(mp_idx) + "_" + str(sp_idx)
|
||||
str(_('SKU')) + '_' + str(mp_idx) + '_' + str(sp_idx)
|
||||
)
|
||||
|
||||
try:
|
||||
@ -307,8 +307,8 @@ def ExportBom(
|
||||
supplier_sku = sp_part.SKU
|
||||
|
||||
# Generate column names for this supplier
|
||||
k_sup = str(_("Supplier")) + "_" + str(sp_idx)
|
||||
k_sku = str(_("SKU")) + "_" + str(sp_idx)
|
||||
k_sup = str(_('Supplier')) + '_' + str(sp_idx)
|
||||
k_sku = str(_('SKU')) + '_' + str(sp_idx)
|
||||
|
||||
try:
|
||||
manufacturer_cols[k_sup].update({bom_idx: supplier_name})
|
||||
@ -322,6 +322,6 @@ def ExportBom(
|
||||
|
||||
data = dataset.export(fmt)
|
||||
|
||||
filename = f"{part.full_name}_BOM.{fmt}"
|
||||
filename = f'{part.full_name}_BOM.{fmt}'
|
||||
|
||||
return DownloadFile(data, filename)
|
||||
|
@ -69,7 +69,7 @@ def render_part_full_name(part) -> str:
|
||||
return template.render(part=part)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"exception while trying to create full name for part %s: %s",
|
||||
'exception while trying to create full name for part %s: %s',
|
||||
part.name,
|
||||
e,
|
||||
)
|
||||
@ -80,7 +80,7 @@ def render_part_full_name(part) -> str:
|
||||
|
||||
|
||||
# Subdirectory for storing part images
|
||||
PART_IMAGE_DIR = "part_images"
|
||||
PART_IMAGE_DIR = 'part_images'
|
||||
|
||||
|
||||
def get_part_image_directory() -> str:
|
||||
|
@ -67,7 +67,7 @@ from InvenTree.status_codes import (
|
||||
from order import models as OrderModels
|
||||
from stock import models as StockModels
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class PartCategory(MetadataMixin, InvenTreeTree):
|
||||
@ -85,8 +85,8 @@ class PartCategory(MetadataMixin, InvenTreeTree):
|
||||
class Meta:
|
||||
"""Metaclass defines extra model properties"""
|
||||
|
||||
verbose_name = _("Part Category")
|
||||
verbose_name_plural = _("Part Categories")
|
||||
verbose_name = _('Part Category')
|
||||
verbose_name_plural = _('Part Categories')
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
"""Custom model deletion routine, which updates any child categories or parts.
|
||||
@ -101,7 +101,7 @@ class PartCategory(MetadataMixin, InvenTreeTree):
|
||||
|
||||
default_location = TreeForeignKey(
|
||||
'stock.StockLocation',
|
||||
related_name="default_categories",
|
||||
related_name='default_categories',
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
@ -129,8 +129,8 @@ class PartCategory(MetadataMixin, InvenTreeTree):
|
||||
icon = models.CharField(
|
||||
blank=True,
|
||||
max_length=100,
|
||||
verbose_name=_("Icon"),
|
||||
help_text=_("Icon (optional)"),
|
||||
verbose_name=_('Icon'),
|
||||
help_text=_('Icon (optional)'),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@ -150,8 +150,8 @@ class PartCategory(MetadataMixin, InvenTreeTree):
|
||||
if self.pk and self.structural and self.partcount(False, False) > 0:
|
||||
raise ValidationError(
|
||||
_(
|
||||
"You cannot make this part category structural because some parts "
|
||||
"are already assigned to it!"
|
||||
'You cannot make this part category structural because some parts '
|
||||
'are already assigned to it!'
|
||||
)
|
||||
)
|
||||
super().clean()
|
||||
@ -387,8 +387,8 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
class Meta:
|
||||
"""Metaclass defines extra model properties"""
|
||||
|
||||
verbose_name = _("Part")
|
||||
verbose_name_plural = _("Parts")
|
||||
verbose_name = _('Part')
|
||||
verbose_name_plural = _('Parts')
|
||||
ordering = ['name']
|
||||
constraints = [
|
||||
UniqueConstraint(fields=['name', 'IPN', 'revision'], name='unique_part')
|
||||
@ -482,7 +482,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of the Part (for use in the admin interface)"""
|
||||
return f"{self.full_name} - {self.description}"
|
||||
return f'{self.full_name} - {self.description}'
|
||||
|
||||
def get_parts_in_bom(self, **kwargs):
|
||||
"""Return a list of all parts in the BOM for this part.
|
||||
@ -686,8 +686,8 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
if stock.exists():
|
||||
if raise_error:
|
||||
raise ValidationError(
|
||||
_("Stock item with this serial number already exists")
|
||||
+ ": "
|
||||
_('Stock item with this serial number already exists')
|
||||
+ ': '
|
||||
+ serial
|
||||
)
|
||||
else:
|
||||
@ -800,7 +800,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
.exists()
|
||||
):
|
||||
raise ValidationError(
|
||||
_("Part with this Name, IPN and Revision already exists.")
|
||||
_('Part with this Name, IPN and Revision already exists.')
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
@ -815,7 +815,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
"""
|
||||
if self.category is not None and self.category.structural:
|
||||
raise ValidationError({
|
||||
'category': _("Parts cannot be assigned to structural part categories!")
|
||||
'category': _('Parts cannot be assigned to structural part categories!')
|
||||
})
|
||||
|
||||
super().clean()
|
||||
@ -989,7 +989,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
|
||||
units = models.CharField(
|
||||
max_length=20,
|
||||
default="",
|
||||
default='',
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('Units'),
|
||||
@ -1024,7 +1024,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
salable = models.BooleanField(
|
||||
default=part_settings.part_salable_default,
|
||||
verbose_name=_('Salable'),
|
||||
help_text=_("Can this part be sold to customers?"),
|
||||
help_text=_('Can this part be sold to customers?'),
|
||||
)
|
||||
|
||||
active = models.BooleanField(
|
||||
@ -1823,7 +1823,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
min_price = normalize(min_price)
|
||||
max_price = normalize(max_price)
|
||||
|
||||
return f"{min_price} - {max_price}"
|
||||
return f'{min_price} - {max_price}'
|
||||
|
||||
def get_supplier_price_range(self, quantity=1):
|
||||
"""Return the supplier price range of this part:
|
||||
@ -1872,7 +1872,7 @@ class Part(InvenTreeBarcodeMixin, InvenTreeNotesMixin, MetadataMixin, MPTTModel)
|
||||
|
||||
for item in self.get_bom_items().select_related('sub_part'):
|
||||
if item.sub_part.pk == self.pk:
|
||||
logger.warning("WARNING: BomItem ID %s contains itself in BOM", item.pk)
|
||||
logger.warning('WARNING: BomItem ID %s contains itself in BOM', item.pk)
|
||||
continue
|
||||
|
||||
q = decimal.Decimal(quantity)
|
||||
@ -2402,7 +2402,7 @@ class PartPricing(common.models.MetaMixin):
|
||||
result = convert_money(money, target_currency)
|
||||
except MissingRate:
|
||||
logger.warning(
|
||||
"No currency conversion rate available for %s -> %s",
|
||||
'No currency conversion rate available for %s -> %s',
money.currency,
target_currency,
)

@ -2432,7 +2432,7 @@ class PartPricing(common.models.MetaMixin):
or not Part.objects.filter(pk=self.part.pk).exists()
):
logger.warning(
"Referenced part instance does not exist - skipping pricing update."
'Referenced part instance does not exist - skipping pricing update.'
)
return

@ -2458,13 +2458,13 @@ class PartPricing(common.models.MetaMixin):

if self.scheduled_for_update:
# Ignore if the pricing is already scheduled to be updated
logger.debug("Pricing for %s already scheduled for update - skipping", p)
logger.debug('Pricing for %s already scheduled for update - skipping', p)
return

if counter > 25:
# Prevent infinite recursion / stack depth issues
logger.debug(
counter, f"Skipping pricing update for {p} - maximum depth exceeded"
counter, f'Skipping pricing update for {p} - maximum depth exceeded'
)
return

@ -3260,7 +3260,7 @@ class PartAttachment(InvenTreeAttachment):

def getSubdir(self):
"""Returns the media subdirectory where part attachments are stored"""
return os.path.join("part_files", str(self.part.id))
return os.path.join('part_files', str(self.part.id))

part = models.ForeignKey(
Part,

@ -3423,7 +3423,7 @@ class PartTestTemplate(MetadataMixin, models.Model):
for test in tests:
if test.key == key:
raise ValidationError({
'test_name': _("Test with this name already exists for this part")
'test_name': _('Test with this name already exists for this part')
})

super().validate_unique(exclude)

@ -3444,35 +3444,35 @@ class PartTestTemplate(MetadataMixin, models.Model):
test_name = models.CharField(
blank=False,
max_length=100,
verbose_name=_("Test Name"),
help_text=_("Enter a name for the test"),
verbose_name=_('Test Name'),
help_text=_('Enter a name for the test'),
)

description = models.CharField(
blank=False,
null=True,
max_length=100,
verbose_name=_("Test Description"),
help_text=_("Enter description for this test"),
verbose_name=_('Test Description'),
help_text=_('Enter description for this test'),
)

required = models.BooleanField(
default=True,
verbose_name=_("Required"),
help_text=_("Is this test required to pass?"),
verbose_name=_('Required'),
help_text=_('Is this test required to pass?'),
)

requires_value = models.BooleanField(
default=False,
verbose_name=_("Requires Value"),
help_text=_("Does this test require a value when adding a test result?"),
verbose_name=_('Requires Value'),
help_text=_('Does this test require a value when adding a test result?'),
)

requires_attachment = models.BooleanField(
default=False,
verbose_name=_("Requires Attachment"),
verbose_name=_('Requires Attachment'),
help_text=_(
"Does this test require a file attachment when adding a test result?"
'Does this test require a file attachment when adding a test result?'
),
)

@ -3503,7 +3503,7 @@ class PartParameterTemplate(MetadataMixin, models.Model):
"""Return a string representation of a PartParameterTemplate instance"""
s = str(self.name)
if self.units:
s += f" ({self.units})"
s += f' ({self.units})'
return s

def clean(self):

@ -3557,8 +3557,8 @@ class PartParameterTemplate(MetadataMixin, models.Model):
).exclude(pk=self.pk)

if others.exists():
msg = _("Parameter template name must be unique")
raise ValidationError({"name": msg})
msg = _('Parameter template name must be unique')
raise ValidationError({'name': msg})
except PartParameterTemplate.DoesNotExist:
pass

@ -3644,7 +3644,7 @@ class PartParameter(MetadataMixin, models.Model):

def __str__(self):
"""String representation of a PartParameter (used in the admin interface)"""
return f"{self.part.full_name} : {self.template.name} = {self.data} ({self.template.units})"
return f'{self.part.full_name} : {self.template.name} = {self.data} ({self.template.units})'

def save(self, *args, **kwargs):
"""Custom save method for the PartParameter model."""

@ -3856,11 +3856,11 @@ class BomItem(DataImportMixin, MetadataMixin, models.Model):
class Meta:
"""Metaclass providing extra model definition"""

verbose_name = _("BOM Item")
verbose_name = _('BOM Item')

def __str__(self):
"""Return a string representation of this BomItem instance"""
return f"{decimal2string(self.quantity)} x {self.sub_part.full_name} to make {self.part.full_name}"
return f'{decimal2string(self.quantity)} x {self.sub_part.full_name} to make {self.part.full_name}'

@staticmethod
def get_api_url():

@ -3968,13 +3968,13 @@ class BomItem(DataImportMixin, MetadataMixin, models.Model):
optional = models.BooleanField(
default=False,
verbose_name=_('Optional'),
help_text=_("This BOM item is optional"),
help_text=_('This BOM item is optional'),
)

consumable = models.BooleanField(
default=False,
verbose_name=_('Consumable'),
help_text=_("This BOM item is consumable (it is not tracked in build orders)"),
help_text=_('This BOM item is consumable (it is not tracked in build orders)'),
)

overage = models.CharField(

@ -4106,8 +4106,8 @@ class BomItem(DataImportMixin, MetadataMixin, models.Model):
if self.sub_part.trackable:
if self.quantity != int(self.quantity):
raise ValidationError({
"quantity": _(
"Quantity must be integer value for trackable parts"
'quantity': _(
'Quantity must be integer value for trackable parts'
)
})

@ -4204,7 +4204,7 @@ class BomItem(DataImportMixin, MetadataMixin, models.Model):
pmin = decimal2money(pmin)
pmax = decimal2money(pmax)

return f"{pmin} to {pmax}"
return f'{pmin} to {pmax}'


@receiver(post_save, sender=BomItem, dispatch_uid='update_bom_build_lines')

@ -4259,7 +4259,7 @@ class BomItemSubstitute(MetadataMixin, models.Model):
class Meta:
"""Metaclass providing extra model definition"""

verbose_name = _("BOM Item Substitute")
verbose_name = _('BOM Item Substitute')

# Prevent duplication of substitute parts
unique_together = ('part', 'bom_item')

@ -4280,7 +4280,7 @@ class BomItemSubstitute(MetadataMixin, models.Model):

if self.part == self.bom_item.sub_part:
raise ValidationError({
"part": _("Substitute part cannot be the same as the master part")
'part': _('Substitute part cannot be the same as the master part')
})

@staticmethod

@ -4345,9 +4345,9 @@ class PartRelated(MetadataMixin, models.Model):

if self.part_1 == self.part_2:
raise ValidationError(
_("Part relationship cannot be created between a part and itself")
_('Part relationship cannot be created between a part and itself')
)

# Check for inverse relationship
if PartRelated.objects.filter(part_1=self.part_2, part_2=self.part_1).exists():
raise ValidationError(_("Duplicate relationship already exists"))
raise ValidationError(_('Duplicate relationship already exists'))
@ -55,7 +55,7 @@ from .models import (
PartTestTemplate,
)

logger = logging.getLogger("inventree")
logger = logging.getLogger('inventree')


class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):

@ -220,7 +220,7 @@ class PartThumbSerializerUpdate(InvenTree.serializers.InvenTreeModelSerializer):
"""Check that file is an image."""
validate = imghdr.what(value)
if not validate:
raise serializers.ValidationError("File is not an image")
raise serializers.ValidationError('File is not an image')
return value

image = InvenTree.serializers.InvenTreeAttachmentSerializerField(required=True)

@ -346,7 +346,7 @@ class PartSetCategorySerializer(serializers.Serializer):
def validate_parts(self, parts):
"""Validate the selected parts"""
if len(parts) == 0:
raise serializers.ValidationError(_("No parts selected"))
raise serializers.ValidationError(_('No parts selected'))

return parts

@ -881,7 +881,7 @@ class PartSerializer(
)
except IntegrityError:
logger.exception(
"Could not create new PartParameter for part %s", instance
'Could not create new PartParameter for part %s', instance
)

# Create initial stock entry

@ -945,7 +945,7 @@ class PartSerializer(
remote_img.save(buffer, format=fmt)

# Construct a simplified name for the image
filename = f"part_{part.pk}_image.{fmt.lower()}"
filename = f'part_{part.pk}_image.{fmt.lower()}'

part.image.save(filename, ContentFile(buffer.getvalue()))

@ -1071,12 +1071,12 @@ class PartStocktakeReportGenerateSerializer(serializers.Serializer):
# Stocktake functionality must be enabled
if not common.models.InvenTreeSetting.get_setting('STOCKTAKE_ENABLE', False):
raise serializers.ValidationError(
_("Stocktake functionality is not enabled")
_('Stocktake functionality is not enabled')
)

# Check that background worker is running
if not InvenTree.status.is_worker_running():
raise serializers.ValidationError(_("Background worker check failed"))
raise serializers.ValidationError(_('Background worker check failed'))

return data

@ -1381,7 +1381,7 @@ class BomItemSerializer(InvenTree.serializers.InvenTreeModelSerializer):
def validate_quantity(self, quantity):
"""Perform validation for the BomItem quantity field"""
if quantity <= 0:
raise serializers.ValidationError(_("Quantity must be greater than zero"))
raise serializers.ValidationError(_('Quantity must be greater than zero'))

return quantity

@ -1680,7 +1680,7 @@ class BomImportExtractSerializer(InvenTree.serializers.DataFileExtractSerializer

if not any(col in self.columns for col in part_columns):
# At least one part column is required!
raise serializers.ValidationError(_("No part column specified"))
raise serializers.ValidationError(_('No part column specified'))

@staticmethod
def process_row(row):

@ -1768,7 +1768,7 @@ class BomImportSubmitSerializer(serializers.Serializer):
items = data['items']

if len(items) == 0:
raise serializers.ValidationError(_("At least one BOM item is required"))
raise serializers.ValidationError(_('At least one BOM item is required'))

data = super().validate(data)

@ -1798,7 +1798,7 @@ class BomImportSubmitSerializer(serializers.Serializer):
bom_items.append(BomItem(**item))

if len(bom_items) > 0:
logger.info("Importing %s BOM items", len(bom_items))
logger.info('Importing %s BOM items', len(bom_items))
BomItem.objects.bulk_create(bom_items)

except Exception as e:
@ -62,7 +62,7 @@ def perform_stocktake(

if not pricing.is_valid:
# If pricing is not valid, let's update
logger.info("Pricing not valid for %s - updating", target)
logger.info('Pricing not valid for %s - updating', target)
pricing.update_pricing(cascade=False)
pricing.refresh_from_db()

@ -204,10 +204,10 @@ def generate_stocktake_report(**kwargs):
n_parts = parts.count()

if n_parts == 0:
logger.info("No parts selected for stocktake report - exiting")
logger.info('No parts selected for stocktake report - exiting')
return

logger.info("Generating new stocktake report for %s parts", n_parts)
logger.info('Generating new stocktake report for %s parts', n_parts)

base_currency = common.settings.currency_code_default()

@ -266,7 +266,7 @@ def generate_stocktake_report(**kwargs):
buffer.write(dataset.export('csv'))

today = datetime.now().date().isoformat()
filename = f"InvenTree_Stocktake_{today}.csv"
filename = f'InvenTree_Stocktake_{today}.csv'
report_file = ContentFile(buffer.getvalue(), name=filename)

if generate_report:

@ -295,7 +295,7 @@ def generate_stocktake_report(**kwargs):

t_stocktake = time.time() - t_start
logger.info(
"Generated stocktake report for %s parts in %ss",
'Generated stocktake report for %s parts in %ss',
total_parts,
round(t_stocktake, 2),
)
@ -24,7 +24,7 @@ from InvenTree.tasks import (
scheduled_task,
)

logger = logging.getLogger("inventree")
logger = logging.getLogger('inventree')


def notify_low_stock(part: part.models.Part):

@ -33,7 +33,7 @@ def notify_low_stock(part: part.models.Part):
- Triggered when the available stock for a given part falls be low the configured threhsold
- A notification is delivered to any users who are 'subscribed' to this part
"""
name = _("Low stock notification")
name = _('Low stock notification')
message = _(
f'The available stock for {part.name} has fallen below the configured minimum level'
)

@ -70,7 +70,7 @@ def update_part_pricing(pricing: part.models.PartPricing, counter: int = 0):
pricing: The target PartPricing instance to be updated
counter: How many times this function has been called in sequence
"""
logger.info("Updating part pricing for %s", pricing.part)
logger.info('Updating part pricing for %s', pricing.part)

pricing.update_pricing(counter=counter)

@ -90,7 +90,7 @@ def check_missing_pricing(limit=250):
results = part.models.PartPricing.objects.filter(updated=None)[:limit]

if results.count() > 0:
logger.info("Found %s parts with empty pricing", results.count())
logger.info('Found %s parts with empty pricing', results.count())

for pp in results:
pp.schedule_for_update()

@ -102,7 +102,7 @@ def check_missing_pricing(limit=250):
results = part.models.PartPricing.objects.filter(updated__lte=stale_date)[:limit]

if results.count() > 0:
logger.info("Found %s stale pricing entries", results.count())
logger.info('Found %s stale pricing entries', results.count())

for pp in results:
pp.schedule_for_update()

@ -112,7 +112,7 @@ def check_missing_pricing(limit=250):
results = part.models.PartPricing.objects.exclude(currency=currency)

if results.count() > 0:
logger.info("Found %s pricing entries in the wrong currency", results.count())
logger.info('Found %s pricing entries in the wrong currency', results.count())

for pp in results:
pp.schedule_for_update()

@ -121,7 +121,7 @@ def check_missing_pricing(limit=250):
results = part.models.Part.objects.filter(pricing_data=None)[:limit]

if results.count() > 0:
logger.info("Found %s parts without pricing", results.count())
logger.info('Found %s parts without pricing', results.count())

for p in results:
pricing = p.pricing

@ -151,14 +151,14 @@ def scheduled_stocktake_reports():
old_reports = part.models.PartStocktakeReport.objects.filter(date__lt=threshold)

if old_reports.count() > 0:
logger.info("Deleting %s stale stocktake reports", old_reports.count())
logger.info('Deleting %s stale stocktake reports', old_reports.count())
old_reports.delete()

# Next, check if stocktake functionality is enabled
if not common.models.InvenTreeSetting.get_setting(
'STOCKTAKE_ENABLE', False, cache=False
):
logger.info("Stocktake functionality is not enabled - exiting")
logger.info('Stocktake functionality is not enabled - exiting')
return

report_n_days = int(

@ -168,11 +168,11 @@ def scheduled_stocktake_reports():
)

if report_n_days < 1:
logger.info("Stocktake auto reports are disabled, exiting")
logger.info('Stocktake auto reports are disabled, exiting')
return

if not check_daily_holdoff('STOCKTAKE_RECENT_REPORT', report_n_days):
logger.info("Stocktake report was recently generated - exiting")
logger.info('Stocktake report was recently generated - exiting')
return

# Let's start a new stocktake report for all parts
@ -42,15 +42,15 @@ class CustomTranslateNode(TranslateNode):
result = result.replace(c, '')

# Escape any quotes contained in the string
result = result.replace("'", r"\'")
result = result.replace("'", r'\'')
result = result.replace('"', r'\"')

# Return the 'clean' resulting string
return result


@register.tag("translate")
@register.tag("trans")
@register.tag('translate')
@register.tag('trans')
def do_translate(parser, token):
"""Custom translation function, lifted from https://github.com/django/django/blob/main/django/templatetags/i18n.py

@ -66,7 +66,7 @@ def do_translate(parser, token):
asvar = None
message_context = None
seen = set()
invalid_context = {"as", "noop"}
invalid_context = {'as', 'noop'}

while remaining:
option = remaining.pop(0)

@ -74,9 +74,9 @@ def do_translate(parser, token):
raise TemplateSyntaxError(
"The '%s' option was specified more than once." % option
)
elif option == "noop":
elif option == 'noop':
noop = True
elif option == "context":
elif option == 'context':
try:
value = remaining.pop(0)
except IndexError:

@ -87,10 +87,10 @@ def do_translate(parser, token):
if value in invalid_context:
raise TemplateSyntaxError(
"Invalid argument '%s' provided to the '%s' tag for the context "
"option" % (value, bits[0])
'option' % (value, bits[0])
)
message_context = parser.compile_filter(value)
elif option == "as":
elif option == 'as':
try:
value = remaining.pop(0)
except IndexError:

@ -110,26 +110,26 @@ def do_translate(parser, token):


# Re-register tags which we have not explicitly overridden
register.tag("blocktrans", django.templatetags.i18n.do_block_translate)
register.tag("blocktranslate", django.templatetags.i18n.do_block_translate)
register.tag('blocktrans', django.templatetags.i18n.do_block_translate)
register.tag('blocktranslate', django.templatetags.i18n.do_block_translate)

register.tag("language", django.templatetags.i18n.language)
register.tag('language', django.templatetags.i18n.language)

register.tag(
"get_available_languages", django.templatetags.i18n.do_get_available_languages
'get_available_languages', django.templatetags.i18n.do_get_available_languages
)
register.tag("get_language_info", django.templatetags.i18n.do_get_language_info)
register.tag('get_language_info', django.templatetags.i18n.do_get_language_info)
register.tag(
"get_language_info_list", django.templatetags.i18n.do_get_language_info_list
'get_language_info_list', django.templatetags.i18n.do_get_language_info_list
)
register.tag("get_current_language", django.templatetags.i18n.do_get_current_language)
register.tag('get_current_language', django.templatetags.i18n.do_get_current_language)
register.tag(
"get_current_language_bidi", django.templatetags.i18n.do_get_current_language_bidi
'get_current_language_bidi', django.templatetags.i18n.do_get_current_language_bidi
)

register.filter("language_name", django.templatetags.i18n.language_name)
register.filter('language_name', django.templatetags.i18n.language_name)
register.filter(
"language_name_translated", django.templatetags.i18n.language_name_translated
'language_name_translated', django.templatetags.i18n.language_name_translated
)
register.filter("language_name_local", django.templatetags.i18n.language_name_local)
register.filter("language_bidi", django.templatetags.i18n.language_bidi)
register.filter('language_name_local', django.templatetags.i18n.language_name_local)
register.filter('language_bidi', django.templatetags.i18n.language_bidi)
@ -65,7 +65,7 @@ def render_date(context, date_object):
try:
date_object = date.fromisoformat(date_object)
except ValueError:
logger.warning("Tried to convert invalid date string: %s", date_object)
logger.warning('Tried to convert invalid date string: %s', date_object)
return None

# We may have already pre-cached the date format by calling this already!

@ -220,7 +220,7 @@ def python_version(*args, **kwargs):
def inventree_version(shortstring=False, *args, **kwargs):
"""Return InvenTree version string."""
if shortstring:
return _(f"{version.inventreeInstanceTitle()} v{version.inventreeVersion()}")
return _(f'{version.inventreeInstanceTitle()} v{version.inventreeVersion()}')
return version.inventreeVersion()


@ -645,18 +645,18 @@ def admin_url(user, table, pk):
from django.urls import reverse

if not djangosettings.INVENTREE_ADMIN_ENABLED:
return ""
return ''

if not user.is_staff:
return ""
return ''

# Check the user has the correct permission
perm_string = f"{app}.change_{model}"
perm_string = f'{app}.change_{model}'
if not user.has_perm(perm_string):
return ''

# Fallback URL
url = reverse(f"admin:{app}_{model}_changelist")
url = reverse(f'admin:{app}_{model}_changelist')

if pk:
try:
@ -178,14 +178,14 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
# Create child categories
for ii in range(10):
child = PartCategory.objects.create(
name=f"Child cat {ii}", description="A child category", parent=cat
name=f'Child cat {ii}', description='A child category', parent=cat
)

# Create parts in this category
for jj in range(10):
Part.objects.create(
name=f"Part xyz {jj}_{ii}",
description="A test part with a description",
name=f'Part xyz {jj}_{ii}',
description='A test part with a description',
category=child,
)

@ -351,8 +351,8 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
for jj in range(3):
parts.append(
Part.objects.create(
name=f"Part xyz {i}_{jj}",
description="Child part of the deleted category",
name=f'Part xyz {i}_{jj}',
description='Child part of the deleted category',
category=cat_to_delete,
)
)

@ -362,8 +362,8 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
# Create child categories under the category to be deleted
for ii in range(3):
child = PartCategory.objects.create(
name=f"Child parent_cat {i}_{ii}",
description="A child category of the deleted category",
name=f'Child parent_cat {i}_{ii}',
description='A child category of the deleted category',
parent=cat_to_delete,
)
child_categories.append(child)

@ -372,8 +372,8 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
for jj in range(3):
child_categories_parts.append(
Part.objects.create(
name=f"Part xyz {i}_{jj}_{ii}",
description="Child part in the child category of the deleted category",
name=f'Part xyz {i}_{jj}_{ii}',
description='Child part in the child category of the deleted category',
category=child,
)
)

@ -438,8 +438,8 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
# Make sure that we get an error if we try to create part in the structural category
with self.assertRaises(ValidationError):
part = Part.objects.create(
name="-",
description="Part which shall not be created",
name='-',
description='Part which shall not be created',
category=structural_category,
)

@ -456,8 +456,8 @@ class PartCategoryAPITest(InvenTreeAPITestCase):

# Create the test part assigned to a non-structural category
part = Part.objects.create(
name="-",
description="Part which category will be changed to structural",
name='-',
description='Part which category will be changed to structural',
category=non_structural_category,
)

@ -752,8 +752,8 @@ class PartAPITest(PartAPITestBase):
for color in ['Red', 'Green', 'Blue', 'Yellow', 'Pink', 'Black']:
variants.append(
Part.objects.create(
name=f"{color} Variant",
description="Variant part with a specific color",
name=f'{color} Variant',
description='Variant part with a specific color',
variant_of=master_part,
category=category,
)

@ -839,7 +839,7 @@ class PartAPITest(PartAPITestBase):
# Try to post a new test with the same name (should fail)
response = self.post(
url,
data={'part': 10004, 'test_name': " newtest", 'description': 'dafsdf'},
data={'part': 10004, 'test_name': ' newtest', 'description': 'dafsdf'},
)

self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

@ -971,8 +971,8 @@ class PartAPITest(PartAPITestBase):

for i in range(10):
gcv = Part.objects.create(
name=f"GC Var {i}",
description="Green chair variant",
name=f'GC Var {i}',
description='Green chair variant',
variant_of=green_chair,
)

@ -1237,10 +1237,10 @@ class PartCreationTests(PartAPITestBase):
"""Test that non-standard ASCII chars are accepted."""
url = reverse('api-part-list')

name = "Kaltgerätestecker"
description = "Gerät Kaltgerätestecker strange chars should get through"
name = 'Kaltgerätestecker'
description = 'Gerät Kaltgerätestecker strange chars should get through'

data = {"name": name, "description": description, "category": 2}
data = {'name': name, 'description': description, 'category': 2}

response = self.post(url, data, expected_code=201)

@ -1284,7 +1284,7 @@ class PartCreationTests(PartAPITestBase):
PartCategoryParameterTemplate.objects.create(
parameter_template=PartParameterTemplate.objects.get(pk=pk),
category=cat,
default_value=f"Value {pk}",
default_value=f'Value {pk}',
)

self.assertEqual(cat.parameter_templates.count(), 3)

@ -1630,8 +1630,8 @@ class PartListTests(PartAPITestBase):
for ii in range(100):
parts.append(
Part(
name=f"Extra part {ii}",
description="A new part which will appear via the API",
name=f'Extra part {ii}',
description='A new part which will appear via the API',
level=0,
tree_id=0,
lft=0,
@ -1975,15 +1975,15 @@ class PartAPIAggregationTest(InvenTreeAPITestCase):

# First, create some parts
paint = PartCategory.objects.create(
parent=None, name="Paint", description="Paints and such"
parent=None, name='Paint', description='Paints and such'
)

for color in ['Red', 'Green', 'Blue', 'Orange', 'Yellow']:
p = Part.objects.create(
category=paint,
units='litres',
name=f"{color} Paint",
description=f"Paint which is {color} in color",
name=f'{color} Paint',
description=f'Paint which is {color} in color',
)

# Create multiple supplier parts in different sizes

@ -1991,7 +1991,7 @@ class PartAPIAggregationTest(InvenTreeAPITestCase):
sp = SupplierPart.objects.create(
part=p,
supplier=supplier,
SKU=f"PNT-{color}-{pk_sz}L",
SKU=f'PNT-{color}-{pk_sz}L',
pack_quantity=str(pk_sz),
)

@ -2137,7 +2137,7 @@ class BomItemTest(InvenTreeAPITestCase):
url = reverse('api-bom-list')

# Order by increasing quantity
response = self.get(f"{url}?ordering=+quantity", expected_code=200)
response = self.get(f'{url}?ordering=+quantity', expected_code=200)

self.assertEqual(len(response.data), 6)

@ -2147,7 +2147,7 @@ class BomItemTest(InvenTreeAPITestCase):
self.assertTrue(q1 < q2)

# Order by decreasing quantity
response = self.get(f"{url}?ordering=-quantity", expected_code=200)
response = self.get(f'{url}?ordering=-quantity', expected_code=200)

self.assertEqual(q1, response.data[-1]['quantity'])
self.assertEqual(q2, response.data[0]['quantity'])

@ -2247,8 +2247,8 @@ class BomItemTest(InvenTreeAPITestCase):
for ii in range(5):
# Create a variant part!
variant = Part.objects.create(
name=f"Variant_{ii}",
description="A variant part, with a description",
name=f'Variant_{ii}',
description='A variant part, with a description',
component=True,
variant_of=sub_part,
)

@ -2295,7 +2295,7 @@ class BomItemTest(InvenTreeAPITestCase):
bom_item = BomItem.objects.get(pk=1)

# Filter stock items which can be assigned against this stock item
response = self.get(stock_url, {"bom_item": bom_item.pk}, expected_code=200)
response = self.get(stock_url, {'bom_item': bom_item.pk}, expected_code=200)

n_items = len(response.data)

@ -2304,8 +2304,8 @@ class BomItemTest(InvenTreeAPITestCase):
# Let's make some!
for ii in range(5):
sub_part = Part.objects.create(
name=f"Substitute {ii}",
description="A substitute part",
name=f'Substitute {ii}',
description='A substitute part',
component=True,
is_template=False,
assembly=False,

@ -2322,7 +2322,7 @@ class BomItemTest(InvenTreeAPITestCase):
self.assertEqual(len(response.data), 1)

# We should also have more stock available to allocate against this BOM item!
response = self.get(stock_url, {"bom_item": bom_item.pk}, expected_code=200)
response = self.get(stock_url, {'bom_item': bom_item.pk}, expected_code=200)

self.assertEqual(len(response.data), n_items + ii + 1)

@ -2355,8 +2355,8 @@ class BomItemTest(InvenTreeAPITestCase):

for i in range(5):
assy = Part.objects.create(
name=f"Assy_{i}",
description="An assembly made of other parts",
name=f'Assy_{i}',
description='An assembly made of other parts',
active=True,
assembly=True,
)

@ -2368,8 +2368,8 @@ class BomItemTest(InvenTreeAPITestCase):
# Create some sub-components
for i in range(5):
cmp = Part.objects.create(
name=f"Component_{i}",
description="A sub component",
name=f'Component_{i}',
description='A sub component',
active=True,
component=True,
)

@ -2403,8 +2403,8 @@ class BomItemTest(InvenTreeAPITestCase):
for i in range(10):
# Create a variant part
vp = Part.objects.create(
name=f"Var {i}",
description="Variant part description field",
name=f'Var {i}',
description='Variant part description field',
variant_of=bom_item.sub_part,
)

@ -2523,7 +2523,7 @@ class PartInternalPriceBreakTest(InvenTreeAPITestCase):
p.active = False
p.save()

response = self.delete(reverse("api-part-detail", kwargs={"pk": 1}))
response = self.delete(reverse('api-part-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, 204)

with self.assertRaises(Part.DoesNotExist):

@ -2588,7 +2588,7 @@ class PartStocktakeTest(InvenTreeAPITestCase):
# Initially no stocktake information available
self.assertIsNone(p.latest_stocktake)

note = f"Note {p.pk}"
note = f'Note {p.pk}'
quantity = p.pk + 5

self.post(
@ -33,9 +33,9 @@ class BomUploadTest(InvenTreeAPITestCase):
for i in range(10):
parts.append(
Part(
name=f"Component {i}",
IPN=f"CMP_{i}",
description="A subcomponent that can be used in a BOM",
name=f'Component {i}',
IPN=f'CMP_{i}',
description='A subcomponent that can be used in a BOM',
component=True,
assembly=False,
lft=0,
@ -70,7 +70,7 @@ class BomItemTest(TestCase):
def test_integer_quantity(self):
"""Test integer validation for BomItem."""
p = Part.objects.create(
name="test", description="part description", component=True, trackable=True
name='test', description='part description', component=True, trackable=True
)

# Creation of a BOMItem with a non-integer quantity of a trackable Part should fail

@ -157,8 +157,8 @@ class BomItemTest(TestCase):
for ii in range(5):
# Create a new part
sub_part = Part.objects.create(
name=f"Orphan {ii}",
description="A substitute part for the orphan part",
name=f'Orphan {ii}',
description='A substitute part for the orphan part',
component=True,
is_template=False,
assembly=False,

@ -196,7 +196,7 @@ class BomItemTest(TestCase):
"""Tests for the 'consumable' BomItem field"""
# Create an assembly part
assembly = Part.objects.create(
name="An assembly", description="Made with parts", assembly=True
name='An assembly', description='Made with parts', assembly=True
)

# No BOM information initially

@ -204,16 +204,16 @@ class BomItemTest(TestCase):

# Create some component items
c1 = Part.objects.create(
name="C1", description="Part C1 - this is just the part description"
name='C1', description='Part C1 - this is just the part description'
)
c2 = Part.objects.create(
name="C2", description="Part C2 - this is just the part description"
name='C2', description='Part C2 - this is just the part description'
)
c3 = Part.objects.create(
name="C3", description="Part C3 - this is just the part description"
name='C3', description='Part C3 - this is just the part description'
)
c4 = Part.objects.create(
name="C4", description="Part C4 - this is just the part description"
name='C4', description='Part C4 - this is just the part description'
)

for p in [c1, c2, c3, c4]:

@ -261,20 +261,20 @@ class BomItemTest(TestCase):
# Second test: A recursive BOM
part_a = Part.objects.create(
name='Part A',
description="A part which is called A",
description='A part which is called A',
assembly=True,
is_template=True,
component=True,
)
part_b = Part.objects.create(
name='Part B',
description="A part which is called B",
description='A part which is called B',
assembly=True,
component=True,
)
part_c = Part.objects.create(
name='Part C',
description="A part which is called C",
description='A part which is called C',
assembly=True,
component=True,
)
@ -106,7 +106,7 @@ class CategoryTest(TestCase):
letter = chr(ord('A') + idx)

child = PartCategory.objects.create(
name=letter * 10, description=f"Subcategory {letter}", parent=parent
name=letter * 10, description=f'Subcategory {letter}', parent=parent
)

parent = child

@ -114,7 +114,7 @@ class CategoryTest(TestCase):
self.assertTrue(len(child.path), 26)
self.assertEqual(
child.pathstring,
"Cat/AAAAAAAAAA/BBBBBBBBBB/CCCCCCCCCC/DDDDDDDDDD/EEEEEEEEEE/FFFFFFFFFF/GGGGGGGGGG/HHHHHHHHHH/IIIIIIIIII/JJJJJJJJJJ/KKKKKKKKK...OO/PPPPPPPPPP/QQQQQQQQQQ/RRRRRRRRRR/SSSSSSSSSS/TTTTTTTTTT/UUUUUUUUUU/VVVVVVVVVV/WWWWWWWWWW/XXXXXXXXXX/YYYYYYYYYY/ZZZZZZZZZZ",
'Cat/AAAAAAAAAA/BBBBBBBBBB/CCCCCCCCCC/DDDDDDDDDD/EEEEEEEEEE/FFFFFFFFFF/GGGGGGGGGG/HHHHHHHHHH/IIIIIIIIII/JJJJJJJJJJ/KKKKKKKKK...OO/PPPPPPPPPP/QQQQQQQQQQ/RRRRRRRRRR/SSSSSSSSSS/TTTTTTTTTT/UUUUUUUUUU/VVVVVVVVVV/WWWWWWWWWW/XXXXXXXXXX/YYYYYYYYYY/ZZZZZZZZZZ',
)
self.assertTrue(len(child.pathstring) <= 250)
@ -45,7 +45,7 @@ class TestForwardMigrations(MigratorTestCase):

for name in ['A', 'C', 'E']:
part = Part.objects.get(name=name)
self.assertEqual(part.description, f"My part {name}")
self.assertEqual(part.description, f'My part {name}')


class TestBomItemMigrations(MigratorTestCase):
@ -181,7 +181,7 @@ class ParameterTests(TestCase):
template2 = PartParameterTemplate.objects.create(
name='My Template 2', units='%'
)
for value in ["1", "1%", "1 percent"]:
for value in ['1', '1%', '1 percent']:
param = PartParameter(part=prt, template=template2, data=value)
param.full_clean()
@ -180,7 +180,7 @@ class PartTest(TestCase):
def test_str(self):
"""Test string representation of a Part"""
p = Part.objects.get(pk=100)
self.assertEqual(str(p), "BOB | Bob | A2 - Can we build it? Yes we can!")
self.assertEqual(str(p), 'BOB | Bob | A2 - Can we build it? Yes we can!')

def test_duplicate(self):
"""Test that we cannot create a "duplicate" Part."""
@ -258,8 +258,8 @@ class PartPricingTests(InvenTreeTestCase):
for ii in range(10):
# Create a new part for the BOM
sub_part = part.models.Part.objects.create(
name=f"Sub Part {ii}",
description="A sub part for use in a BOM",
name=f'Sub Part {ii}',
description='A sub part for use in a BOM',
component=True,
assembly=False,
)

@ -403,7 +403,7 @@ class PartPricingTests(InvenTreeTestCase):
# Create some parts
for ii in range(100):
part.models.Part.objects.create(
name=f"Part_{ii}", description="A test part"
name=f'Part_{ii}', description='A test part'
)

# Ensure there is no pricing data

@ -424,7 +424,7 @@ class PartPricingTests(InvenTreeTestCase):
but it pointed to a Part instance which was slated to be deleted inside an atomic transaction.
"""
p = part.models.Part.objects.create(
name="my part", description="my part description", active=False
name='my part', description='my part description', active=False
)

# Create some stock items
@ -105,9 +105,9 @@ class PartImport(FileManagementFormView):
'part/import_wizard/match_references.html',
]
form_steps_description = [
_("Upload File"),
_("Match Fields"),
_("Match References"),
_('Upload File'),
_('Match Fields'),
_('Match References'),
]

form_field_map = {

@ -540,8 +540,8 @@ class PartPricing(AjaxView):
"""View for inspecting part pricing information."""

model = Part
ajax_template_name = "part/part_pricing.html"
ajax_form_title = _("Part Pricing")
ajax_template_name = 'part/part_pricing.html'
ajax_form_title = _('Part Pricing')
form_class = part_forms.PartPriceForm

role_required = ['sales_order.view', 'part.view']
@ -49,7 +49,7 @@ class PluginSettingInline(admin.TabularInline):
class PluginConfigAdmin(admin.ModelAdmin):
"""Custom admin with restricted id fields."""

readonly_fields = ["key", "name"]
readonly_fields = ['key', 'name']
list_display = [
'name',
'key',
@ -227,7 +227,7 @@ def check_plugin(plugin_slug: str, plugin_pk: int) -> InvenTreePlugin:
"""
# Make sure that a plugin reference is specified
if plugin_slug is None and plugin_pk is None:
raise NotFound(detail="Plugin not specified")
raise NotFound(detail='Plugin not specified')

# Define filter
filter = {}

@ -342,13 +342,13 @@ class RegistryStatusView(APIView):
for error_detail in errors:
for name, message in error_detail.items():
error_list.append({
"stage": stage,
"name": name,
"message": message,
'stage': stage,
'name': name,
'message': message,
})

result = PluginSerializers.PluginRegistryStatusSerializer({
"registry_errors": error_list
'registry_errors': error_list
}).data

return Response(result)

@ -382,7 +382,7 @@ plugin_api_urls = [
r'<int:pk>/',
include([
re_path(
r"^settings/",
r'^settings/',
include([
re_path(
r'^(?P<key>\w+)/',

@ -390,9 +390,9 @@ plugin_api_urls = [
name='api-plugin-setting-detail-pk',
),
re_path(
r"^.*$",
r'^.*$',
PluginAllSettingList.as_view(),
name="api-plugin-settings",
name='api-plugin-settings',
),
]),
),

@ -419,9 +419,9 @@ plugin_api_urls = [
),
# Registry status
re_path(
r"^status/",
r'^status/',
RegistryStatusView.as_view(),
name="api-plugin-registry-status",
name='api-plugin-registry-status',
),
# Anything else
re_path(r'^.*$', PluginList.as_view(), name='api-plugin-list'),
@ -28,7 +28,7 @@ class PluginAppConfig(AppConfig):
return

if not canAppAccessDatabase(allow_test=True, allow_plugins=True):
logger.info("Skipping plugin loading sequence") # pragma: no cover
logger.info('Skipping plugin loading sequence') # pragma: no cover
else:
logger.info('Loading InvenTree plugins')
@ -21,7 +21,7 @@ class ActionPluginView(APIView):
data = request.data.get('data', None)

if action is None:
return Response({'error': _("No action specified")})
return Response({'error': _('No action specified')})

action_plugins = registry.with_mixin('action')
for plugin in action_plugins:

@ -30,4 +30,4 @@ class ActionPluginView(APIView):
return Response(plugin.get_response(request.user, data=data))

# If we got to here, no matching action was found
return Response({'error': _("No matching action found"), "action": action})
return Response({'error': _('No matching action found'), 'action': action})
@ -4,7 +4,7 @@
class ActionMixin:
"""Mixin that enables custom actions."""

ACTION_NAME = ""
ACTION_NAME = ''

class MixinMeta:
"""Meta options for this mixin."""

@ -47,7 +47,7 @@ class ActionMixin:
Default implementation is a simple response which can be overridden.
"""
return {
"action": self.action_name(),
"result": self.get_result(user, data),
"info": self.get_info(user, data),
'action': self.action_name(),
'result': self.get_result(user, data),
'info': self.get_info(user, data),
}
@ -57,7 +57,7 @@ class ActionMixinTests(TestCase):
self.assertEqual(self.plugin.get_result(), False)
self.assertIsNone(self.plugin.get_info())
self.assertEqual(
self.plugin.get_response(), {"action": '', "result": False, "info": None}
self.plugin.get_response(), {'action': '', 'result': False, 'info': None}
)

# overridden functions

@ -69,9 +69,9 @@ class ActionMixinTests(TestCase):
self.assertEqual(
self.action_plugin.get_response(),
{
"action": 'abc123',
"result": self.ACTION_RETURN + 'result',
"info": self.ACTION_RETURN + 'info',
'action': 'abc123',
'result': self.ACTION_RETURN + 'result',
'info': self.ACTION_RETURN + 'info',
},
)

@ -87,7 +87,7 @@ class APITests(InvenTreeTestCase):
self.assertEqual(response.data, {'error': 'No action specified'})

# Test non-exsisting action
response = self.client.post('/api/action/', data={'action': "nonexsisting"})
response = self.client.post('/api/action/', data={'action': 'nonexsisting'})
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.data,
@ -58,7 +58,7 @@ class BarcodeView(CreateAPIView):
Any custom fields passed by the specific serializer
"""
raise NotImplementedError(
f"handle_barcode not implemented for {self.__class__}"
f'handle_barcode not implemented for {self.__class__}'
)

def scan_barcode(self, barcode: str, request, **kwargs):

@ -79,11 +79,11 @@ class BarcodeView(CreateAPIView):
if result is None:
continue

if "error" in result:
if 'error' in result:
logger.info(
"%s.scan(...) returned an error: %s",
'%s.scan(...) returned an error: %s',
current_plugin.__class__.__name__,
result["error"],
result['error'],
)
if not response:
plugin = current_plugin

@ -155,9 +155,9 @@ class BarcodeAssign(BarcodeView):
result = plugin.scan(barcode)

if result is not None:
result["error"] = _("Barcode matches existing item")
result["plugin"] = plugin.name
result["barcode_data"] = barcode
result['error'] = _('Barcode matches existing item')
result['plugin'] = plugin.name
result['barcode_data'] = barcode

raise ValidationError(result)

@ -174,20 +174,20 @@ class BarcodeAssign(BarcodeView):
app_label = model._meta.app_label
model_name = model._meta.model_name

table = f"{app_label}_{model_name}"
table = f'{app_label}_{model_name}'

if not RuleSet.check_table_permission(request.user, table, "change"):
if not RuleSet.check_table_permission(request.user, table, 'change'):
raise PermissionDenied({
"error": f"You do not have the required permissions for {table}"
'error': f'You do not have the required permissions for {table}'
})

instance.assign_barcode(barcode_data=barcode, barcode_hash=barcode_hash)

return Response({
'success': f"Assigned barcode to {label} instance",
'success': f'Assigned barcode to {label} instance',
label: {'pk': instance.pk},
"barcode_data": barcode,
"barcode_hash": barcode_hash,
'barcode_data': barcode,
'barcode_hash': barcode_hash,
})

# If we got here, it means that no valid model types were provided

@ -238,11 +238,11 @@ class BarcodeUnassign(BarcodeView):
app_label = model._meta.app_label
model_name = model._meta.model_name

table = f"{app_label}_{model_name}"
table = f'{app_label}_{model_name}'

if not RuleSet.check_table_permission(request.user, table, "change"):
if not RuleSet.check_table_permission(request.user, table, 'change'):
raise PermissionDenied({
"error": f"You do not have the required permissions for {table}"
'error': f'You do not have the required permissions for {table}'
})

# Unassign the barcode data from the model instance
@ -313,11 +313,11 @@ class BarcodePOAllocate(BarcodeView):
)

if supplier_parts.count() == 0:
raise ValidationError({"error": _("No matching supplier parts found")})
raise ValidationError({'error': _('No matching supplier parts found')})

if supplier_parts.count() > 1:
raise ValidationError({
"error": _("Multiple matching supplier parts found")
'error': _('Multiple matching supplier parts found')
})

# At this stage, we have a single matching supplier part

@ -342,7 +342,7 @@ class BarcodePOAllocate(BarcodeView):
manufacturer_part=result.get('manufacturerpart', None),
)

result['success'] = _("Matched supplier part")
result['success'] = _('Matched supplier part')
result['supplierpart'] = supplier_part.format_matched_response()

# TODO: Determine the 'quantity to order' for the supplier part

@ -379,24 +379,24 @@ class BarcodePOReceive(BarcodeView):
purchase_order = kwargs.get('purchase_order', None)
location = kwargs.get('location', None)

plugins = registry.with_mixin("barcode")
plugins = registry.with_mixin('barcode')

# Look for a barcode plugin which knows how to deal with this barcode
plugin = None

response = {"barcode_data": barcode, "barcode_hash": hash_barcode(barcode)}
response = {'barcode_data': barcode, 'barcode_hash': hash_barcode(barcode)}

internal_barcode_plugin = next(
filter(lambda plugin: plugin.name == "InvenTreeBarcode", plugins)
filter(lambda plugin: plugin.name == 'InvenTreeBarcode', plugins)
)

if result := internal_barcode_plugin.scan(barcode):
if 'stockitem' in result:
response["error"] = _("Item has already been received")
response['error'] = _('Item has already been received')
raise ValidationError(response)

# Now, look just for "supplier-barcode" plugins
plugins = registry.with_mixin("supplier-barcode")
plugins = registry.with_mixin('supplier-barcode')

plugin_response = None

@ -408,11 +408,11 @@ class BarcodePOReceive(BarcodeView):
if result is None:
continue

if "error" in result:
if 'error' in result:
logger.info(
"%s.scan_receive_item(...) returned an error: %s",
'%s.scan_receive_item(...) returned an error: %s',
current_plugin.__class__.__name__,
result["error"],
result['error'],
)
if not plugin_response:
plugin = current_plugin

@ -429,9 +429,9 @@ class BarcodePOReceive(BarcodeView):

# A plugin has not been found!
if plugin is None:
response["error"] = _("No match for supplier barcode")
response['error'] = _('No match for supplier barcode')
raise ValidationError(response)
elif "error" in response:
elif 'error' in response:
raise ValidationError(response)
else:
return Response(response)

@ -583,11 +583,11 @@ barcode_api_urls = [
# Unlink a third-party barcode from an item
path('unlink/', BarcodeUnassign.as_view(), name='api-barcode-unlink'),
# Receive a purchase order item by scanning its barcode
path("po-receive/", BarcodePOReceive.as_view(), name="api-barcode-po-receive"),
path('po-receive/', BarcodePOReceive.as_view(), name='api-barcode-po-receive'),
# Allocate parts to a purchase order by scanning their barcode
path("po-allocate/", BarcodePOAllocate.as_view(), name="api-barcode-po-allocate"),
path('po-allocate/', BarcodePOAllocate.as_view(), name='api-barcode-po-allocate'),
# Allocate stock to a sales order by scanning barcode
path("so-allocate/", BarcodeSOAllocate.as_view(), name="api-barcode-so-allocate"),
path('so-allocate/', BarcodeSOAllocate.as_view(), name='api-barcode-so-allocate'),
# Catch-all performs barcode 'scan'
re_path(r'^.*$', BarcodeScan.as_view(), name='api-barcode-scan'),
]
@ -23,7 +23,7 @@ class BarcodeMixin:
Custom barcode plugins should use and extend this mixin as necessary.
"""

ACTION_NAME = ""
ACTION_NAME = ''

class MixinMeta:
"""Meta options for this mixin."""

@ -62,19 +62,19 @@ class SupplierBarcodeMixin(BarcodeMixin):
"""

# Set of standard field names which can be extracted from the barcode
CUSTOMER_ORDER_NUMBER = "customer_order_number"
SUPPLIER_ORDER_NUMBER = "supplier_order_number"
PACKING_LIST_NUMBER = "packing_list_number"
SHIP_DATE = "ship_date"
CUSTOMER_PART_NUMBER = "customer_part_number"
SUPPLIER_PART_NUMBER = "supplier_part_number"
PURCHASE_ORDER_LINE = "purchase_order_line"
QUANTITY = "quantity"
DATE_CODE = "date_code"
LOT_CODE = "lot_code"
COUNTRY_OF_ORIGIN = "country_of_origin"
MANUFACTURER = "manufacturer"
MANUFACTURER_PART_NUMBER = "manufacturer_part_number"
CUSTOMER_ORDER_NUMBER = 'customer_order_number'
SUPPLIER_ORDER_NUMBER = 'supplier_order_number'
PACKING_LIST_NUMBER = 'packing_list_number'
SHIP_DATE = 'ship_date'
CUSTOMER_PART_NUMBER = 'customer_part_number'
SUPPLIER_PART_NUMBER = 'supplier_part_number'
PURCHASE_ORDER_LINE = 'purchase_order_line'
QUANTITY = 'quantity'
DATE_CODE = 'date_code'
LOT_CODE = 'lot_code'
COUNTRY_OF_ORIGIN = 'country_of_origin'
MANUFACTURER = 'manufacturer'
MANUFACTURER_PART_NUMBER = 'manufacturer_part_number'

def __init__(self):
"""Register mixin."""

@ -83,7 +83,7 @@ class SupplierBarcodeMixin(BarcodeMixin):

def get_field_value(self, key, backup_value=None):
"""Return the value of a barcode field."""
fields = getattr(self, "barcode_fields", None) or {}
fields = getattr(self, 'barcode_fields', None) or {}

return fields.get(key, backup_value)

@ -125,7 +125,7 @@ class SupplierBarcodeMixin(BarcodeMixin):

"""
raise NotImplementedError(
"extract_barcode_fields must be implemented by each plugin"
'extract_barcode_fields must be implemented by each plugin'
)

def scan(self, barcode_data):

@ -145,16 +145,16 @@ class SupplierBarcodeMixin(BarcodeMixin):
)

if len(supplier_parts) > 1:
return {"error": _("Found multiple matching supplier parts for barcode")}
return {'error': _('Found multiple matching supplier parts for barcode')}
elif not supplier_parts:
return None

supplier_part = supplier_parts[0]

data = {
"pk": supplier_part.pk,
"api_url": f"{SupplierPart.get_api_url()}{supplier_part.pk}/",
"web_url": supplier_part.get_absolute_url(),
'pk': supplier_part.pk,
'api_url': f'{SupplierPart.get_api_url()}{supplier_part.pk}/',
'web_url': supplier_part.get_absolute_url(),
}

return {SupplierPart.barcode_model_type(): data}

@ -178,7 +178,7 @@ class SupplierBarcodeMixin(BarcodeMixin):
)

if len(supplier_parts) > 1:
return {"error": _("Found multiple matching supplier parts for barcode")}
return {'error': _('Found multiple matching supplier parts for barcode')}
elif not supplier_parts:
return None

@ -196,17 +196,17 @@ class SupplierBarcodeMixin(BarcodeMixin):

if len(matching_orders) > 1:
return {
"error": _(f"Found multiple purchase orders matching '{order}'")
'error': _(f"Found multiple purchase orders matching '{order}'")
}

if len(matching_orders) == 0:
return {"error": _(f"No matching purchase order for '{order}'")}
return {'error': _(f"No matching purchase order for '{order}'")}

purchase_order = matching_orders.first()

if supplier and purchase_order:
if purchase_order.supplier != supplier:
return {"error": _("Purchase order does not match supplier")}
return {'error': _('Purchase order does not match supplier')}

return self.receive_purchase_order_item(
supplier_part,
@ -226,17 +226,17 @@ class SupplierBarcodeMixin(BarcodeMixin):
|
||||
if not isinstance(self, SettingsMixin):
|
||||
return None
|
||||
|
||||
if supplier_pk := self.get_setting("SUPPLIER_ID"):
|
||||
if supplier_pk := self.get_setting('SUPPLIER_ID'):
|
||||
if supplier := Company.objects.get(pk=supplier_pk):
|
||||
return supplier
|
||||
else:
|
||||
logger.error(
|
||||
"No company with pk %d (set \"SUPPLIER_ID\" setting to a valid value)",
|
||||
'No company with pk %d (set "SUPPLIER_ID" setting to a valid value)',
|
||||
supplier_pk,
|
||||
)
|
||||
return None
|
||||
|
||||
if not (supplier_name := getattr(self, "DEFAULT_SUPPLIER_NAME", None)):
|
||||
if not (supplier_name := getattr(self, 'DEFAULT_SUPPLIER_NAME', None)):
|
||||
return None
|
||||
|
||||
suppliers = Company.objects.filter(
|
||||
@ -246,7 +246,7 @@ class SupplierBarcodeMixin(BarcodeMixin):
|
||||
if len(suppliers) != 1:
|
||||
return None
|
||||
|
||||
self.set_setting("SUPPLIER_ID", suppliers.first().pk)
|
||||
self.set_setting('SUPPLIER_ID', suppliers.first().pk)
|
||||
|
||||
return suppliers.first()
|
||||
|
||||
@ -260,21 +260,21 @@ class SupplierBarcodeMixin(BarcodeMixin):
|
||||
if it does not use the standard field names.
|
||||
"""
|
||||
return {
|
||||
"K": cls.CUSTOMER_ORDER_NUMBER,
|
||||
"1K": cls.SUPPLIER_ORDER_NUMBER,
|
||||
"11K": cls.PACKING_LIST_NUMBER,
|
||||
"6D": cls.SHIP_DATE,
|
||||
"9D": cls.DATE_CODE,
|
||||
"10D": cls.DATE_CODE,
|
||||
"4K": cls.PURCHASE_ORDER_LINE,
|
||||
"14K": cls.PURCHASE_ORDER_LINE,
|
||||
"P": cls.SUPPLIER_PART_NUMBER,
|
||||
"1P": cls.MANUFACTURER_PART_NUMBER,
|
||||
"30P": cls.SUPPLIER_PART_NUMBER,
|
||||
"1T": cls.LOT_CODE,
|
||||
"4L": cls.COUNTRY_OF_ORIGIN,
|
||||
"1V": cls.MANUFACTURER,
|
||||
"Q": cls.QUANTITY,
|
||||
'K': cls.CUSTOMER_ORDER_NUMBER,
|
||||
'1K': cls.SUPPLIER_ORDER_NUMBER,
|
||||
'11K': cls.PACKING_LIST_NUMBER,
|
||||
'6D': cls.SHIP_DATE,
|
||||
'9D': cls.DATE_CODE,
|
||||
'10D': cls.DATE_CODE,
|
||||
'4K': cls.PURCHASE_ORDER_LINE,
|
||||
'14K': cls.PURCHASE_ORDER_LINE,
|
||||
'P': cls.SUPPLIER_PART_NUMBER,
|
||||
'1P': cls.MANUFACTURER_PART_NUMBER,
|
||||
'30P': cls.SUPPLIER_PART_NUMBER,
|
||||
'1T': cls.LOT_CODE,
|
||||
'4L': cls.COUNTRY_OF_ORIGIN,
|
||||
'1V': cls.MANUFACTURER,
|
||||
'Q': cls.QUANTITY,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@ -324,10 +324,10 @@ class SupplierBarcodeMixin(BarcodeMixin):
|
||||
def parse_isoiec_15434_barcode2d(barcode_data: str) -> list[str]:
|
||||
"""Parse a ISO/IEC 15434 barcode, returning the split data section."""
|
||||
|
||||
OLD_MOUSER_HEADER = ">[)>06\x1d"
|
||||
HEADER = "[)>\x1e06\x1d"
|
||||
TRAILER = "\x1e\x04"
|
||||
DELIMITER = "\x1d"
|
||||
OLD_MOUSER_HEADER = '>[)>06\x1d'
|
||||
HEADER = '[)>\x1e06\x1d'
|
||||
TRAILER = '\x1e\x04'
|
||||
DELIMITER = '\x1d'
|
||||
|
||||
# Some old mouser barcodes start with this messed up header
|
||||
if barcode_data.startswith(OLD_MOUSER_HEADER):
@ -419,7 +419,7 @@ class SupplierBarcodeMixin(BarcodeMixin):

# find incomplete line_items that match the supplier_part
line_items = purchase_order.lines.filter(
part=supplier_part.pk, quantity__gt=F("received")
part=supplier_part.pk, quantity__gt=F('received')
)
if len(line_items) == 1 or not quantity:
line_item = line_items[0]
@ -439,7 +439,7 @@ class SupplierBarcodeMixin(BarcodeMixin):
line_item = line_items.first()

if not line_item:
return {"error": _("Failed to find pending line item for supplier part")}
return {'error': _('Failed to find pending line item for supplier part')}

no_stock_locations = False
if not location:
@ -457,20 +457,20 @@ class SupplierBarcodeMixin(BarcodeMixin):
no_stock_locations = True

response = {
"lineitem": {"pk": line_item.pk, "purchase_order": purchase_order.pk}
'lineitem': {'pk': line_item.pk, 'purchase_order': purchase_order.pk}
}

if quantity:
response["lineitem"]["quantity"] = quantity
response['lineitem']['quantity'] = quantity
if location:
response["lineitem"]["location"] = location.pk
response['lineitem']['location'] = location.pk

# if either the quantity is missing or no location is defined/found
# -> return the line_item found, so the client can gather the missing
# information and complete the action with an 'api-po-receive' call
if not quantity or (not location and not no_stock_locations):
response["action_required"] = _(
"Further information required to receive line item"
response['action_required'] = _(
'Further information required to receive line item'
)
return response
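As the comments above note, when the quantity or destination location is still unknown the scan returns the matched line item plus an action_required hint, and the client completes the receipt with a follow-up 'api-po-receive' call. A hedged example of what such a response body might look like (primary keys invented; the message string is the translated one above):

# Illustrative response shape only; pk values are invented.
example_response = {
    'lineitem': {
        'pk': 17,
        'purchase_order': 4,
        'quantity': 25,   # only present if the barcode carried a quantity
        # 'location': 12, # only present if a destination location was resolved
    },
    'action_required': 'Further information required to receive line item',
}
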
@ -478,5 +478,5 @@ class SupplierBarcodeMixin(BarcodeMixin):
line_item, location, quantity, user, barcode=barcode
)

response["success"] = _("Received purchase order line item")
response['success'] = _('Received purchase order line item')
return response

@ -86,7 +86,7 @@ class BarcodePOAllocateSerializer(BarcodeSerializer):
"""Validate the provided order"""

if order.status != PurchaseOrderStatus.PENDING.value:
raise ValidationError(_("Purchase order is not pending"))
raise ValidationError(_('Purchase order is not pending'))

return order

@ -111,7 +111,7 @@ class BarcodePOReceiveSerializer(BarcodeSerializer):
"""Validate the provided order"""

if order and order.status != PurchaseOrderStatus.PLACED.value:
raise ValidationError(_("Purchase order has not been placed"))
raise ValidationError(_('Purchase order has not been placed'))

return order

@ -126,7 +126,7 @@ class BarcodePOReceiveSerializer(BarcodeSerializer):
"""Validate the provided location"""

if location and location.structural:
raise ValidationError(_("Cannot select a structural location"))
raise ValidationError(_('Cannot select a structural location'))

return location

@ -147,7 +147,7 @@ class BarcodeSOAllocateSerializer(BarcodeSerializer):
"""Validate the provided order"""

if order and order.status != SalesOrderStatus.PENDING.value:
raise ValidationError(_("Sales order is not pending"))
raise ValidationError(_('Sales order is not pending'))

return order

@ -169,7 +169,7 @@ class BarcodeSOAllocateSerializer(BarcodeSerializer):
"""Validate the provided shipment"""

if shipment and shipment.is_delivered():
raise ValidationError(_("Shipment has already been delivered"))
raise ValidationError(_('Shipment has already been delivered'))

return shipment
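The serializer hunks above all use the same Django REST Framework pattern: a field-level validate_<field> hook that rejects the scanned object before the barcode action runs. A minimal, self-contained sketch of that pattern with an invented field and message:

# Generic illustration of the validate_<field> hook used by the serializers
# in this diff; the field and message here are invented.
from rest_framework import serializers

class ExampleScanSerializer(serializers.Serializer):
    quantity = serializers.IntegerField()

    def validate_quantity(self, quantity):
        """Runs per-field, before validated_data is assembled."""
        if quantity <= 0:
            raise serializers.ValidationError('Quantity must be positive')
        return quantity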