Merge branch 'inventree:master' into matmair/issue2279

Matthias Mair 2022-02-20 02:36:53 +01:00 committed by GitHub
commit da0eda0b27
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 1198 additions and 756 deletions

.github/FUNDING.yml

@ -0,0 +1,2 @@
patreon: inventree
ko_fi: inventree


@ -45,6 +45,62 @@ def rename_attachment(instance, filename):
return os.path.join(instance.getSubdir(), filename)
class DataImportMixin(object):
"""
Model mixin class which provides support for 'data import' functionality.
Models which implement this mixin should provide information on the fields available for import
"""
# Define a map of fields available for import
IMPORT_FIELDS = {}
@classmethod
def get_import_fields(cls):
"""
Return all available import fields
Where information on a particular field is not explicitly provided,
introspect the base model to (attempt to) find that information.
"""
fields = cls.IMPORT_FIELDS
for name, field in fields.items():
# Attempt to extract base field information from the model
base_field = None
for f in cls._meta.fields:
if f.name == name:
base_field = f
break
if base_field:
if 'label' not in field:
field['label'] = base_field.verbose_name
if 'help_text' not in field:
field['help_text'] = base_field.help_text
fields[name] = field
return fields
@classmethod
def get_required_import_fields(cls):
""" Return all *required* import fields """
fields = {}
for name, field in cls.get_import_fields().items():
required = field.get('required', False)
if required:
fields[name] = field
return fields
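
To illustrate how a model opts in to this mixin, here is a minimal hypothetical sketch; the Widget model and its fields are invented for this example and are not part of this commit.

from django.db import models
from InvenTree.models import DataImportMixin


class Widget(models.Model, DataImportMixin):
    # Only fields listed here are offered for bulk import; 'label' and
    # 'help_text' fall back to the underlying model field definitions.
    IMPORT_FIELDS = {
        'name': {'required': True},
        'notes': {},
    }

    name = models.CharField(max_length=100, verbose_name='Name')
    notes = models.CharField(max_length=250, blank=True, help_text='Optional notes')


# Widget.get_required_import_fields() then contains only the 'name' entry.
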
class ReferenceIndexingMixin(models.Model):
"""
A mixin for keeping track of numerical copies of the "reference" field.


@ -5,8 +5,8 @@ Serializers used in various InvenTree apps
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import tablib
from decimal import Decimal
@ -332,3 +332,309 @@ class InvenTreeDecimalField(serializers.FloatField):
return Decimal(str(data))
except:
raise serializers.ValidationError(_("Invalid value"))
class DataFileUploadSerializer(serializers.Serializer):
"""
Generic serializer for uploading a data file, and extracting a dataset.
- Validates uploaded file
- Extracts column names
- Extracts data rows
"""
# Implementing class should register a target model (database model) to be used for import
TARGET_MODEL = None
class Meta:
fields = [
'data_file',
]
data_file = serializers.FileField(
label=_("Data File"),
help_text=_("Select data file for upload"),
required=True,
allow_empty_file=False,
)
def validate_data_file(self, data_file):
"""
Perform validation checks on the uploaded data file.
"""
self.filename = data_file.name
name, ext = os.path.splitext(data_file.name)
# Remove the leading . from the extension
ext = ext[1:]
accepted_file_types = [
'xls', 'xlsx',
'csv', 'tsv',
'xml',
]
if ext not in accepted_file_types:
raise serializers.ValidationError(_("Unsupported file type"))
# Impose a 50MB limit on uploaded data files
max_upload_file_size = 50 * 1024 * 1024
if data_file.size > max_upload_file_size:
raise serializers.ValidationError(_("File is too large"))
# Read file data into memory (bytes object)
try:
data = data_file.read()
except Exception as e:
raise serializers.ValidationError(str(e))
if ext in ['csv', 'tsv', 'xml']:
try:
data = data.decode()
except Exception as e:
raise serializers.ValidationError(str(e))
# Convert to a tablib dataset (we expect headers)
try:
self.dataset = tablib.Dataset().load(data, ext, headers=True)
except Exception as e:
raise serializers.ValidationError(str(e))
if len(self.dataset.headers) == 0:
raise serializers.ValidationError(_("No columns found in file"))
if len(self.dataset) == 0:
raise serializers.ValidationError(_("No data rows found in file"))
return data_file
def match_column(self, column_name, field_names, exact=False):
"""
Attempt to match a column name (from the file) to a field (defined in the model)
Order of matching is:
- Direct match
- Case insensitive match
- Fuzzy match
"""
column_name = column_name.strip()
column_name_lower = column_name.lower()
if column_name in field_names:
return column_name
for field_name in field_names:
if field_name.lower() == column_name_lower:
return field_name
if exact:
# Exhausted all available 'exact' matches
return None
# TODO: Fuzzy pattern matching for column names
# No matches found
return None
def extract_data(self):
"""
Returns dataset extracted from the file
"""
# Provide a dict of available import fields for the model
model_fields = {}
# Keep track of columns we have already extracted
matched_columns = set()
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except:
pass
# Extract a list of valid model field names
model_field_names = [key for key in model_fields.keys()]
# Provide a dict of available columns from the dataset
file_columns = {}
for header in self.dataset.headers:
column = {}
# Attempt to "match" file columns to model fields
match = self.match_column(header, model_field_names, exact=True)
if match is not None and match not in matched_columns:
matched_columns.add(match)
column['value'] = match
else:
column['value'] = None
file_columns[header] = column
return {
'file_fields': file_columns,
'model_fields': model_fields,
'rows': [row.values() for row in self.dataset.dict],
'filename': self.filename,
}
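
For reference, the structure returned by extract_data() looks roughly like the following for a two-column CSV; this is the payload consumed by the selectImportFields() dialog added later in this diff. The values below are invented for illustration and are not actual output.

example_output = {
    'file_fields': {
        # One entry per file column; 'value' is the matched model field (or None)
        'Quantity': {'value': 'quantity'},   # matched case-insensitively
        'Part Name': {'value': None},        # no exact match for 'part_name'
    },
    'model_fields': {
        'quantity': {'required': True, 'label': 'Quantity'},
        'part_name': {'label': 'Part Name', 'help_text': 'Part name'},
    },
    'rows': [['2', 'M3 Bolt'], ['5', 'M3 Nut']],
    'filename': 'bom.csv',
}
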
def save(self):
...
class DataFileExtractSerializer(serializers.Serializer):
"""
Generic serializer for extracting data from an imported dataset.
- User provides an array of matched headers
- User provides an array of raw data rows
"""
# Implementing class should register a target model (database model) to be used for import
TARGET_MODEL = None
class Meta:
fields = [
'columns',
'rows',
]
# Mapping of columns
columns = serializers.ListField(
child=serializers.CharField(
allow_blank=True,
),
)
rows = serializers.ListField(
child=serializers.ListField(
child=serializers.CharField(
allow_blank=True,
allow_null=True,
),
)
)
def validate(self, data):
data = super().validate(data)
self.columns = data.get('columns', [])
self.rows = data.get('rows', [])
if len(self.rows) == 0:
raise serializers.ValidationError(_("No data rows provided"))
if len(self.columns) == 0:
raise serializers.ValidationError(_("No data columns supplied"))
self.validate_extracted_columns()
return data
@property
def data(self):
# Ensure model_fields is always defined, even if no TARGET_MODEL is registered
model_fields = {}
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except:
pass
rows = []
for row in self.rows:
"""
Optionally pre-process each row, before sending back to the client
"""
processed_row = self.process_row(self.row_to_dict(row))
if processed_row:
rows.append({
"original": row,
"data": processed_row,
})
return {
'fields': model_fields,
'columns': self.columns,
'rows': rows,
}
def process_row(self, row):
"""
Process a 'row' of data, which is a mapped column:value dict
Returns either a mapped column:value dict, or None.
If the function returns None, the row is ignored!
"""
# Default implementation simply returns the original row data
return row
def row_to_dict(self, row):
"""
Convert a "row" to a named data dict
"""
row_dict = {
'errors': {},
}
for idx, value in enumerate(row):
if idx < len(self.columns):
col = self.columns[idx]
if col:
row_dict[col] = value
return row_dict
def validate_extracted_columns(self):
"""
Perform custom validation of header mapping.
"""
# Ensure model_fields is always defined, even if no TARGET_MODEL is registered
model_fields = {}
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except:
pass
cols_seen = set()
for name, field in model_fields.items():
required = field.get('required', False)
# Check for missing required columns
if required:
if name not in self.columns:
raise serializers.ValidationError(_("Missing required column") + f": '{name}'")
for col in self.columns:
if not col:
continue
# Check for duplicated columns
if col in cols_seen:
raise serializers.ValidationError(_("Duplicate column") + f": '{col}'")
cols_seen.add(col)
def save(self):
"""
No "save" action for this serializer
"""
...
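
The companion DataFileExtractSerializer then expects the user-confirmed column mapping plus the raw rows. A minimal sketch of that payload, mirroring the API tests further down in this diff (values are illustrative):

payload = {
    # One entry per file column; blank entries mark columns to ignore
    'columns': ['quantity', 'part', ''],
    # Raw data rows, in the same column order as 'columns'
    'rows': [
        ['2', '101', 'ignored'],
        ['5', '102', 'ignored'],
    ],
}

# row_to_dict() maps the first row to:
# {'errors': {}, 'quantity': '2', 'part': '101'}
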


@ -12,11 +12,17 @@ import common.models
INVENTREE_SW_VERSION = "0.6.0 dev"
# InvenTree API version
INVENTREE_API_VERSION = 24
INVENTREE_API_VERSION = 26
"""
Increment this API version number whenever there is a significant change to the API that any clients need to know about
v26 -> 2022-02-17
- Adds API endpoint for uploading a BOM file and extracting data
v25 -> 2022-02-17
- Adds ability to filter "part" list endpoint by "in_bom_for" argument
v24 -> 2022-02-10
- Adds API endpoint for deleting (cancelling) build order outputs


@ -208,7 +208,7 @@ class BuildOutputCreateSerializer(serializers.Serializer):
raise ValidationError(_("Integer quantity required for trackable parts"))
if part.has_trackable_parts():
raise ValidationError(_("Integer quantity required, as the bill of materials contains tracakble parts"))
raise ValidationError(_("Integer quantity required, as the bill of materials contains trackable parts"))
return quantity


@ -995,6 +995,23 @@ class PartList(generics.ListCreateAPIView):
except (ValueError, Part.DoesNotExist):
pass
# Filter only parts which are in the "BOM" for a given part
in_bom_for = params.get('in_bom_for', None)
if in_bom_for is not None:
try:
in_bom_for = Part.objects.get(pk=in_bom_for)
# Extract a list of parts within the BOM
bom_parts = in_bom_for.get_parts_in_bom()
print("bom_parts:", bom_parts)
print([p.pk for p in bom_parts])
queryset = queryset.filter(pk__in=[p.pk for p in bom_parts])
except (ValueError, Part.DoesNotExist):
pass
# Filter by whether the BOM has been validated (or not)
bom_valid = params.get('bom_valid', None)
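
The new in_bom_for filter can be exercised with a plain GET request. A minimal sketch, assuming the standard /api/part/ list route, a reachable server URL, and a valid API token (all placeholders below are invented):

import requests

response = requests.get(
    'https://inventree.example.com/api/part/',
    params={'in_bom_for': 57},   # pk of the assembly whose BOM should be searched
    headers={'Authorization': 'Token <api-token>'},
)
print(response.json())
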
@ -1533,13 +1550,15 @@ class BomList(generics.ListCreateAPIView):
]
class BomExtract(generics.CreateAPIView):
class BomImportUpload(generics.CreateAPIView):
"""
API endpoint for extracting BOM data from a BOM file.
API endpoint for uploading a complete Bill of Materials.
It is assumed that the BOM has been extracted from a file using the BomExtract endpoint.
"""
queryset = Part.objects.none()
serializer_class = part_serializers.BomExtractSerializer
queryset = Part.objects.all()
serializer_class = part_serializers.BomImportUploadSerializer
def create(self, request, *args, **kwargs):
"""
@ -1556,15 +1575,22 @@ class BomExtract(generics.CreateAPIView):
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class BomUpload(generics.CreateAPIView):
class BomImportExtract(generics.CreateAPIView):
"""
API endpoint for uploading a complete Bill of Materials.
It is assumed that the BOM has been extracted from a file using the BomExtract endpoint.
API endpoint for extracting BOM data from a BOM file.
"""
queryset = Part.objects.all()
serializer_class = part_serializers.BomUploadSerializer
queryset = Part.objects.none()
serializer_class = part_serializers.BomImportExtractSerializer
class BomImportSubmit(generics.CreateAPIView):
"""
API endpoint for submitting BOM data from a BOM file
"""
queryset = BomItem.objects.none()
serializer_class = part_serializers.BomImportSubmitSerializer
class BomDetail(generics.RetrieveUpdateDestroyAPIView):
@ -1719,9 +1745,10 @@ bom_api_urls = [
url(r'^.*$', BomDetail.as_view(), name='api-bom-item-detail'),
])),
url(r'^extract/', BomExtract.as_view(), name='api-bom-extract'),
url(r'^upload/', BomUpload.as_view(), name='api-bom-upload'),
# API endpoint URLs for importing BOM data
url(r'^import/upload/', BomImportUpload.as_view(), name='api-bom-import-upload'),
url(r'^import/extract/', BomImportExtract.as_view(), name='api-bom-import-extract'),
url(r'^import/submit/', BomImportSubmit.as_view(), name='api-bom-import-submit'),
# Catch-all
url(r'^.*$', BomList.as_view(), name='api-bom-list'),
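
The three new endpoints are intended to be called in sequence. Below is a rough sketch of that flow using Django's test client and reverse(), as in the unit tests further down in this diff; the client object, bom_file and field values are placeholders, and the submit payload is omitted because BomImportSubmitSerializer is truncated here.

from django.urls import reverse

# 1) Upload the raw file; the response lists file columns and model fields
upload = client.post(
    reverse('api-bom-import-upload'),
    {'data_file': bom_file},
    format='multipart',
)

# 2) Post the user-confirmed column mapping together with the raw rows
extract = client.post(
    reverse('api-bom-import-extract'),
    {'columns': ['quantity', 'part'], 'rows': [['2', '101']]},
)

# 3) Finally, POST the corrected rows to reverse('api-bom-import-submit');
#    the exact payload is defined by BomImportSubmitSerializer (not shown in full here).
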


@ -46,7 +46,7 @@ from common.models import InvenTreeSetting
from InvenTree import helpers
from InvenTree import validators
from InvenTree.models import InvenTreeTree, InvenTreeAttachment
from InvenTree.models import InvenTreeTree, InvenTreeAttachment, DataImportMixin
from InvenTree.fields import InvenTreeURLField
from InvenTree.helpers import decimal2string, normalize, decimal2money
import InvenTree.tasks
@ -483,6 +483,36 @@ class Part(MPTTModel):
def __str__(self):
return f"{self.full_name} - {self.description}"
def get_parts_in_bom(self):
"""
Return a list of all parts in the BOM for this part.
Takes into account substitutes, variant parts, and inherited BOM items
"""
parts = set()
for bom_item in self.get_bom_items():
for part in bom_item.get_valid_parts_for_allocation():
parts.add(part)
return parts
def check_if_part_in_bom(self, other_part):
"""
Check if the other_part is in the BOM for this part.
Note:
- Accounts for substitute parts
- Accounts for variant BOMs
"""
for bom_item in self.get_bom_items():
if other_part in bom_item.get_valid_parts_for_allocation():
return True
# No matches found
return False
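
A hypothetical usage sketch for the two helpers above (the part names and the matched result are invented):

from part.models import Part

assembly = Part.objects.get(name='Widget Assembly')
screw = Part.objects.get(name='M3 Screw')

# Set of Part objects, including substitutes, variants and inherited BOM lines
components = assembly.get_parts_in_bom()

if assembly.check_if_part_in_bom(screw):
    print('M3 Screw (or a valid substitute) appears in the BOM')
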
def check_add_to_bom(self, parent, raise_error=False, recursive=True):
"""
Check if this Part can be added to the BOM of another part.
@ -2550,7 +2580,7 @@ class PartCategoryParameterTemplate(models.Model):
help_text=_('Default Parameter Value'))
class BomItem(models.Model):
class BomItem(models.Model, DataImportMixin):
""" A BomItem links a part to its component items.
A part can have a BOM (bill of materials) which defines
which parts are required (and in what quantity) to make it.
@ -2568,6 +2598,39 @@ class BomItem(models.Model):
allow_variants: Stock for part variants can be substituted for this BomItem
"""
# Fields available for bulk import
IMPORT_FIELDS = {
'quantity': {
'required': True
},
'reference': {},
'overage': {},
'allow_variants': {},
'inherited': {},
'optional': {},
'note': {},
'part': {
'label': _('Part'),
'help_text': _('Part ID or part name'),
},
'part_id': {
'label': _('Part ID'),
'help_text': _('Unique part ID value')
},
'part_name': {
'label': _('Part Name'),
'help_text': _('Part name'),
},
'part_ipn': {
'label': _('Part IPN'),
'help_text': _('Part IPN value'),
},
'level': {
'label': _('Level'),
'help_text': _('BOM level'),
}
}
@staticmethod
def get_api_url():
return reverse('api-bom-list')


@ -4,8 +4,6 @@ JSON serializers for Part app
import imghdr
from decimal import Decimal
import os
import tablib
from django.urls import reverse_lazy
from django.db import models, transaction
@ -17,7 +15,9 @@ from rest_framework import serializers
from sql_util.utils import SubqueryCount, SubquerySum
from djmoney.contrib.django_rest_framework import MoneyField
from InvenTree.serializers import (InvenTreeAttachmentSerializerField,
from InvenTree.serializers import (DataFileUploadSerializer,
DataFileExtractSerializer,
InvenTreeAttachmentSerializerField,
InvenTreeDecimalField,
InvenTreeImageSerializerField,
InvenTreeModelSerializer,
@ -709,307 +709,129 @@ class PartCopyBOMSerializer(serializers.Serializer):
)
class BomExtractSerializer(serializers.Serializer):
class BomImportUploadSerializer(DataFileUploadSerializer):
"""
Serializer for uploading a file and extracting data from it.
Note: 2022-02-04 - This needs a *serious* refactor in future, probably
When parsing the file, the following things happen:
a) Check file format and validity
b) Look for "required" fields
c) Look for "part" fields - used to "infer" part
Once the file itself has been validated, we iterate through each data row:
- If the "level" column is provided, ignore anything below level 1
- Try to "guess" the part based on part_id / part_name / part_ipn
- Extract other fields as required
"""
TARGET_MODEL = BomItem
class Meta:
fields = [
'bom_file',
'data_file',
'part',
'clear_existing',
'clear_existing_bom',
]
# These columns must be present
REQUIRED_COLUMNS = [
'quantity',
]
# We need at least one column to specify a "part"
PART_COLUMNS = [
'part',
'part_id',
'part_name',
'part_ipn',
]
# These columns are "optional"
OPTIONAL_COLUMNS = [
'allow_variants',
'inherited',
'optional',
'overage',
'note',
'reference',
]
def find_matching_column(self, col_name, columns):
# Direct match
if col_name in columns:
return col_name
col_name = col_name.lower().strip()
for col in columns:
if col.lower().strip() == col_name:
return col
# No match
return None
def find_matching_data(self, row, col_name, columns):
"""
Extract data from the row, based on the "expected" column name
"""
col_name = self.find_matching_column(col_name, columns)
return row.get(col_name, None)
bom_file = serializers.FileField(
label=_("BOM File"),
help_text=_("Select Bill of Materials file"),
part = serializers.PrimaryKeyRelatedField(
queryset=Part.objects.all(),
required=True,
allow_empty_file=False,
allow_null=False,
many=False,
)
def validate_bom_file(self, bom_file):
"""
Perform validation checks on the uploaded BOM file
"""
self.filename = bom_file.name
name, ext = os.path.splitext(bom_file.name)
# Remove the leading . from the extension
ext = ext[1:]
accepted_file_types = [
'xls', 'xlsx',
'csv', 'tsv',
'xml',
]
if ext not in accepted_file_types:
raise serializers.ValidationError(_("Unsupported file type"))
# Impose a 50MB limit on uploaded BOM files
max_upload_file_size = 50 * 1024 * 1024
if bom_file.size > max_upload_file_size:
raise serializers.ValidationError(_("File is too large"))
# Read file data into memory (bytes object)
try:
data = bom_file.read()
except Exception as e:
raise serializers.ValidationError(str(e))
if ext in ['csv', 'tsv', 'xml']:
try:
data = data.decode()
except Exception as e:
raise serializers.ValidationError(str(e))
# Convert to a tablib dataset (we expect headers)
try:
self.dataset = tablib.Dataset().load(data, ext, headers=True)
except Exception as e:
raise serializers.ValidationError(str(e))
for header in self.REQUIRED_COLUMNS:
match = self.find_matching_column(header, self.dataset.headers)
if match is None:
raise serializers.ValidationError(_("Missing required column") + f": '{header}'")
part_column_matches = {}
part_match = False
for col in self.PART_COLUMNS:
col_match = self.find_matching_column(col, self.dataset.headers)
part_column_matches[col] = col_match
if col_match is not None:
part_match = True
if not part_match:
raise serializers.ValidationError(_("No part column found"))
if len(self.dataset) == 0:
raise serializers.ValidationError(_("No data rows found"))
return bom_file
def extract_data(self):
"""
Read individual rows out of the BOM file
"""
rows = []
errors = []
found_parts = set()
headers = self.dataset.headers
level_column = self.find_matching_column('level', headers)
for row in self.dataset.dict:
row_error = {}
"""
If the "level" column is specified, and this is not a top-level BOM item, ignore the row!
"""
if level_column is not None:
level = row.get('level', None)
if level is not None:
try:
level = int(level)
if level != 1:
continue
except:
pass
"""
Next, we try to "guess" the part, based on the provided data.
A) If the part_id is supplied, use that!
B) If the part name and/or part_ipn are supplied, maybe we can use those?
"""
part_id = self.find_matching_data(row, 'part_id', headers)
part_name = self.find_matching_data(row, 'part_name', headers)
part_ipn = self.find_matching_data(row, 'part_ipn', headers)
part = None
if part_id is not None:
try:
part = Part.objects.get(pk=part_id)
except (ValueError, Part.DoesNotExist):
pass
# Optionally, specify using field "part"
if part is None:
pk = self.find_matching_data(row, 'part', headers)
if pk is not None:
try:
part = Part.objects.get(pk=pk)
except (ValueError, Part.DoesNotExist):
pass
if part is None:
if part_name or part_ipn:
queryset = Part.objects.all()
if part_name:
queryset = queryset.filter(name=part_name)
if part_ipn:
queryset = queryset.filter(IPN=part_ipn)
# Only if we have a single direct match
if queryset.exists():
if queryset.count() == 1:
part = queryset.first()
else:
# Multiple matches!
row_error['part'] = _('Multiple matching parts found')
if part is None:
if 'part' not in row_error:
row_error['part'] = _('No matching part found')
else:
if part.pk in found_parts:
row_error['part'] = _("Duplicate part selected")
elif not part.component:
row_error['part'] = _('Part is not designated as a component')
found_parts.add(part.pk)
row['part'] = part.pk if part is not None else None
"""
Read out the 'quantity' column - check that it is valid
"""
quantity = self.find_matching_data(row, 'quantity', self.dataset.headers)
# Ensure quantity field is provided
row['quantity'] = quantity
if quantity is None:
row_error['quantity'] = _('Quantity not provided')
else:
try:
quantity = Decimal(quantity)
if quantity <= 0:
row_error['quantity'] = _('Quantity must be greater than zero')
except:
row_error['quantity'] = _('Invalid quantity')
# For each "optional" column, ensure the column names are allocated correctly
for field_name in self.OPTIONAL_COLUMNS:
if field_name not in row:
row[field_name] = self.find_matching_data(row, field_name, self.dataset.headers)
rows.append(row)
errors.append(row_error)
return {
'rows': rows,
'errors': errors,
'headers': headers,
'filename': self.filename,
}
part = serializers.PrimaryKeyRelatedField(queryset=Part.objects.filter(assembly=True), required=True)
clear_existing = serializers.BooleanField(
label=_("Clear Existing BOM"),
help_text=_("Delete existing BOM data first"),
clear_existing_bom = serializers.BooleanField(
label=_('Clear Existing BOM'),
help_text=_('Delete existing BOM items before uploading')
)
def save(self):
data = self.validated_data
master_part = data['part']
clear_existing = data['clear_existing']
if data.get('clear_existing_bom', False):
part = data['part']
if clear_existing:
# Remove all existing BOM items
master_part.bom_items.all().delete()
with transaction.atomic():
part.bom_items.all().delete()
class BomUploadSerializer(serializers.Serializer):
class BomImportExtractSerializer(DataFileExtractSerializer):
"""
"""
TARGET_MODEL = BomItem
def validate_extracted_columns(self):
super().validate_extracted_columns()
part_columns = ['part', 'part_name', 'part_ipn', 'part_id']
if not any([col in self.columns for col in part_columns]):
# At least one part column is required!
raise serializers.ValidationError(_("No part column specified"))
def process_row(self, row):
# Skip any rows which are at a lower "level"
level = row.get('level', None)
if level is not None:
try:
level = int(level)
if level != 1:
# Skip this row
return None
except:
pass
# Attempt to extract a valid part based on the provided data
part_id = row.get('part_id', row.get('part', None))
part_name = row.get('part_name', row.get('part', None))
part_ipn = row.get('part_ipn', None)
part = None
if part_id is not None:
try:
part = Part.objects.get(pk=part_id)
except (ValueError, Part.DoesNotExist):
pass
# No direct match, where else can we look?
if part is None:
if part_name or part_ipn:
queryset = Part.objects.all()
if part_name:
queryset = queryset.filter(name=part_name)
if part_ipn:
queryset = queryset.filter(IPN=part_ipn)
if queryset.exists():
if queryset.count() == 1:
part = queryset.first()
else:
row['errors']['part'] = _('Multiple matching parts found')
if part is None:
row['errors']['part'] = _('No matching part found')
else:
if not part.component:
row['errors']['part'] = _('Part is not designated as a component')
# Update the 'part' value in the row
row['part'] = part.pk if part is not None else None
# Check the provided 'quantity' value
quantity = row.get('quantity', None)
if quantity is None:
row['errors']['quantity'] = _('Quantity not provided')
else:
try:
quantity = Decimal(quantity)
if quantity <= 0:
row['errors']['quantity'] = _('Quantity must be greater than zero')
except:
row['errors']['quantity'] = _('Invalid quantity')
return row
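
A worked illustration of process_row() on a single mapped row (the primary key and names are invented, and the matched part is assumed to be flagged as a component):

row_in = {'errors': {}, 'level': '1', 'part_name': 'M3 Bolt', 'quantity': '0'}

# - level == 1, so the row is kept (rows at level 2+ return None and are skipped)
# - no 'part_id' or 'part' value, so the part is looked up by name 'M3 Bolt'
# - quantity '0' fails the "greater than zero" check
row_out = {
    'errors': {'quantity': 'Quantity must be greater than zero'},
    'level': '1',
    'part_name': 'M3 Bolt',
    'part': 17,     # pk of the (assumed) unique name match
    'quantity': '0',
}
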
class BomImportSubmitSerializer(serializers.Serializer):
"""
Serializer for uploading a BOM against a specified part.


@ -77,15 +77,15 @@ $('#bom-template-download').click(function() {
$('#bom-upload').click(function() {
constructForm('{% url "api-bom-extract" %}', {
constructForm('{% url "api-bom-import-upload" %}', {
method: 'POST',
fields: {
bom_file: {},
data_file: {},
part: {
value: {{ part.pk }},
hidden: true,
},
clear_existing: {},
clear_existing_bom: {},
},
title: '{% trans "Upload BOM File" %}',
onSuccess: function(response) {
@ -93,16 +93,24 @@ $('#bom-upload').click(function() {
// Clear existing entries from the table
$('.bom-import-row').remove();
// Disable the "submit" button
$('#bom-submit').show();
selectImportFields(
'{% url "api-bom-import-extract" %}',
response,
{
success: function(response) {
constructBomUploadTable(response);
constructBomUploadTable(response);
// Show the "submit" button
$('#bom-submit').show();
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
});
});
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
});
});
}
}
);
}
});


@ -41,8 +41,6 @@ class BomUploadTest(InvenTreeAPITestCase):
assembly=False,
)
self.url = reverse('api-bom-extract')
def post_bom(self, filename, file_data, part=None, clear_existing=None, expected_code=None, content_type='text/plain'):
bom_file = SimpleUploadedFile(
@ -58,11 +56,9 @@ class BomUploadTest(InvenTreeAPITestCase):
clear_existing = False
response = self.post(
self.url,
reverse('api-bom-import-upload'),
data={
'bom_file': bom_file,
'part': part,
'clear_existing': clear_existing,
'data_file': bom_file,
},
expected_code=expected_code,
format='multipart',
@ -76,14 +72,12 @@ class BomUploadTest(InvenTreeAPITestCase):
"""
response = self.post(
self.url,
reverse('api-bom-import-upload'),
data={},
expected_code=400
)
self.assertIn('No file was submitted', str(response.data['bom_file']))
self.assertIn('This field is required', str(response.data['part']))
self.assertIn('This field is required', str(response.data['clear_existing']))
self.assertIn('No file was submitted', str(response.data['data_file']))
def test_unsupported_file(self):
"""
@ -96,7 +90,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('Unsupported file type', str(response.data['bom_file']))
self.assertIn('Unsupported file type', str(response.data['data_file']))
def test_broken_file(self):
"""
@ -109,7 +103,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('The submitted file is empty', str(response.data['bom_file']))
self.assertIn('The submitted file is empty', str(response.data['data_file']))
response = self.post_bom(
'test.xls',
@ -118,11 +112,11 @@ class BomUploadTest(InvenTreeAPITestCase):
content_type='application/xls',
)
self.assertIn('Unsupported format, or corrupt file', str(response.data['bom_file']))
self.assertIn('Unsupported format, or corrupt file', str(response.data['data_file']))
def test_invalid_upload(self):
def test_missing_rows(self):
"""
Test upload of an invalid file
Test upload of an invalid file (without data rows)
"""
dataset = tablib.Dataset()
@ -139,7 +133,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn("Missing required column: 'quantity'", str(response.data))
self.assertIn('No data rows found in file', str(response.data))
# Try again, with an .xlsx file
response = self.post_bom(
@ -149,32 +143,61 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('No data rows found in file', str(response.data))
def test_missing_columns(self):
"""
Upload extracted data, but with missing columns
"""
url = reverse('api-bom-import-extract')
rows = [
['1', 'test'],
['2', 'test'],
]
# Post without columns
response = self.post(
url,
{},
expected_code=400,
)
self.assertIn('This field is required', str(response.data['rows']))
self.assertIn('This field is required', str(response.data['columns']))
response = self.post(
url,
{
'rows': rows,
'columns': ['part', 'reference'],
},
expected_code=400
)
self.assertIn("Missing required column: 'quantity'", str(response.data))
# Add the quantity field (or close enough)
dataset.headers.append('quAntiTy ')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
response = self.post(
url,
{
'rows': rows,
'columns': ['quantity', 'reference'],
},
expected_code=400,
)
self.assertIn('No part column found', str(response.data))
self.assertIn('No part column specified', str(response.data))
dataset.headers.append('part_id')
dataset.headers.append('part_name')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
expected_code=400,
response = self.post(
url,
{
'rows': rows,
'columns': ['quantity', 'part'],
},
expected_code=201,
)
self.assertIn('No data rows found', str(response.data))
def test_invalid_data(self):
"""
Upload data which contains errors
@ -195,25 +218,31 @@ class BomUploadTest(InvenTreeAPITestCase):
dataset.append([cmp.pk, idx])
# Add a duplicate part too
dataset.append([components.first().pk, 'invalid'])
url = reverse('api-bom-import-extract')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
expected_code=201
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
)
errors = response.data['errors']
rows = response.data['rows']
self.assertIn('Quantity must be greater than zero', str(errors[0]))
self.assertIn('Part is not designated as a component', str(errors[5]))
self.assertIn('Duplicate part selected', str(errors[-1]))
self.assertIn('Invalid quantity', str(errors[-1]))
# Returned data must be the same as the original dataset
self.assertEqual(len(rows), len(dataset))
for idx, row in enumerate(response.data['rows'][:-1]):
self.assertEqual(str(row['part']), str(components[idx].pk))
for idx, row in enumerate(rows):
data = row['data']
cmp = components[idx]
# Should have guessed the correct part
self.assertEqual(data['part'], cmp.pk)
# Check some specific error messages
self.assertEqual(rows[0]['data']['errors']['quantity'], 'Quantity must be greater than zero')
self.assertEqual(rows[5]['data']['errors']['part'], 'Part is not designated as a component')
def test_part_guess(self):
"""
@ -233,9 +262,14 @@ class BomUploadTest(InvenTreeAPITestCase):
10,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
url = reverse('api-bom-import-extract')
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
expected_code=201,
)
@ -244,7 +278,7 @@ class BomUploadTest(InvenTreeAPITestCase):
self.assertEqual(len(rows), 10)
for idx in range(10):
self.assertEqual(rows[idx]['part'], components[idx].pk)
self.assertEqual(rows[idx]['data']['part'], components[idx].pk)
# Should also be able to 'guess' part by the IPN value
dataset = tablib.Dataset()
@ -257,9 +291,12 @@ class BomUploadTest(InvenTreeAPITestCase):
10,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
expected_code=201,
)
@ -268,13 +305,15 @@ class BomUploadTest(InvenTreeAPITestCase):
self.assertEqual(len(rows), 10)
for idx in range(10):
self.assertEqual(rows[idx]['part'], components[idx].pk)
self.assertEqual(rows[idx]['data']['part'], components[idx].pk)
def test_levels(self):
"""
Test that multi-level BOMs are correctly handled during upload
"""
url = reverse('api-bom-import-extract')
dataset = tablib.Dataset()
dataset.headers = ['level', 'part', 'quantity']
@ -288,11 +327,21 @@ class BomUploadTest(InvenTreeAPITestCase):
2,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
response = self.post(
url,
{
'rows': [row for row in dataset],
'columns': dataset.headers,
},
expected_code=201,
)
rows = response.data['rows']
# Only parts at index 1, 4, 7 should have been returned
self.assertEqual(len(response.data['rows']), 3)
# Check the returned PK values
self.assertEqual(rows[0]['data']['part'], components[1].pk)
self.assertEqual(rows[1]['data']['part'], components[4].pk)
self.assertEqual(rows[2]['data']['part'], components[7].pk)


@ -225,13 +225,14 @@ class ReportPrintMixin:
outputs.append(report.render_as_string(request))
else:
outputs.append(report.render(request))
except TemplateDoesNotExist:
filename = report.template
except TemplateDoesNotExist as e:
template = str(e)
if not template:
template = report.template
return Response(
{
'error': _(f"Template file '{filename}' is missing or does not exist"),
'error': _(f"Template file '{template}' is missing or does not exist"),
},
status=400,
)
@ -269,13 +270,16 @@ class ReportPrintMixin:
else:
pdf = outputs[0].get_document().write_pdf()
except TemplateDoesNotExist:
except TemplateDoesNotExist as e:
filename = report.template
template = str(e)
if not template:
template = report.template
return Response(
{
'error': _(f"Template file '{filename}' is missing or does not exist"),
'error': _(f"Template file '{template}' is missing or does not exist"),
},
status=400,
)


@ -14,12 +14,12 @@ import datetime
from django.urls import reverse
from django.db import models
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ValidationError, FieldError
from django.template.loader import render_to_string
from django.template import Template, Context
from django.core.files.storage import FileSystemStorage
from django.core.validators import FileExtensionValidator
import build.models
@ -43,32 +43,12 @@ except OSError as err: # pragma: no cover
logger = logging.getLogger("inventree")
class ReportFileUpload(FileSystemStorage):
"""
Custom implementation of FileSystemStorage class.
When uploading a report (or a snippet / asset / etc),
it is often important to ensure the filename is not arbitrarily *changed*,
if the name of the uploaded file is identical to the currently stored file.
For example, a snippet or asset file is referenced in a template by filename,
and we do not want that filename to change when we upload a new *version*
of the snippet or asset file.
This uploader class performs the following pseudo-code function:
- If the model is *new*, proceed as normal
- If the model is being updated:
a) If the new filename is *different* from the existing filename, proceed as normal
b) If the new filename is *identical* to the existing filename, we want to overwrite the existing file
"""
def get_available_name(self, name, max_length=None):
return super().get_available_name(name, max_length)
def rename_template(instance, filename):
"""
Helper function for 'renaming' uploaded report files.
Pass responsibility back to the calling class,
to ensure that files are uploaded to the correct directory.
"""
return instance.rename_file(filename)
@ -155,7 +135,23 @@ class ReportBase(models.Model):
filename = os.path.basename(filename)
return os.path.join('report', 'report_template', self.getSubdir(), filename)
path = os.path.join('report', 'report_template', self.getSubdir(), filename)
fullpath = os.path.join(settings.MEDIA_ROOT, path)
fullpath = os.path.abspath(fullpath)
# If the report file is the *same* filename as the one being uploaded,
# remove the original one from the media directory
if str(filename) == str(self.template):
if os.path.exists(fullpath):
logger.info(f"Deleting existing report template: '{filename}'")
os.remove(fullpath)
# Ensure that the cache is cleared for this template!
cache.delete(fullpath)
return path
@property
def extension(self):
@ -522,16 +518,20 @@ def rename_snippet(instance, filename):
path = os.path.join('report', 'snippets', filename)
fullpath = os.path.join(settings.MEDIA_ROOT, path)
fullpath = os.path.abspath(fullpath)
# If the snippet file is the *same* filename as the one being uploaded,
# delete the original one from the media directory
if str(filename) == str(instance.snippet):
fullpath = os.path.join(settings.MEDIA_ROOT, path)
fullpath = os.path.abspath(fullpath)
if os.path.exists(fullpath):
logger.info(f"Deleting existing snippet file: '{filename}'")
os.remove(fullpath)
# Ensure that the cache is deleted for this snippet
cache.delete(fullpath)
return path


@ -109,6 +109,31 @@ class StockItemSerialize(generics.CreateAPIView):
return context
class StockItemInstall(generics.CreateAPIView):
"""
API endpoint for installing a particular stock item into this stock item.
- stock_item.part must be in the BOM for this part
- stock_item must currently be "in stock"
- stock_item must be serialized (and not belong to another item)
"""
queryset = StockItem.objects.none()
serializer_class = StockSerializers.InstallStockItemSerializer
def get_serializer_context(self):
context = super().get_serializer_context()
context['request'] = self.request
try:
context['item'] = StockItem.objects.get(pk=self.kwargs.get('pk', None))
except:
pass
return context
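
A minimal sketch of calling the new install endpoint (the host, token and primary keys are placeholders; the route itself is registered further down in this diff):

import requests

requests.post(
    'https://inventree.example.com/api/stock/10/install/',
    json={
        'stock_item': 42,   # must be in stock, serialized, and in the parent part's BOM
        'note': 'Fitted during assembly',
    },
    headers={'Authorization': 'Token <api-token>'},
)
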
class StockAdjustView(generics.CreateAPIView):
"""
A generic class for handling stocktake actions.
@ -503,11 +528,34 @@ class StockList(generics.ListCreateAPIView):
serial_numbers = data.get('serial_numbers', '')
# Assign serial numbers for a trackable part
if serial_numbers and part.trackable:
if serial_numbers:
if not part.trackable:
raise ValidationError({
'serial_numbers': [_("Serial numbers cannot be supplied for a non-trackable part")]
})
# If serial numbers are specified, check that they match!
try:
serials = extract_serial_numbers(serial_numbers, quantity, part.getLatestSerialNumberInt())
# Determine if any of the specified serial numbers already exist!
existing = []
for serial in serials:
if part.checkIfSerialNumberExists(serial):
existing.append(serial)
if len(existing) > 0:
msg = _("The following serial numbers already exist")
msg += " : "
msg += ",".join([str(e) for e in existing])
raise ValidationError({
'serial_numbers': [msg],
})
except DjangoValidationError as e:
raise ValidationError({
'quantity': e.messages,
@ -1256,6 +1304,7 @@ stock_api_urls = [
# Detail views for a single stock item
url(r'^(?P<pk>\d+)/', include([
url(r'^serialize/', StockItemSerialize.as_view(), name='api-stock-item-serialize'),
url(r'^install/', StockItemInstall.as_view(), name='api-stock-item-install'),
url(r'^.*$', StockDetail.as_view(), name='api-stock-detail'),
])),


@ -8,7 +8,6 @@ from __future__ import unicode_literals
from django import forms
from django.forms.utils import ErrorDict
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from mptt.fields import TreeNodeChoiceField
@ -16,8 +15,6 @@ from InvenTree.forms import HelperForm
from InvenTree.fields import RoundingDecimalFormField
from InvenTree.fields import DatePickerFormField
from part.models import Part
from .models import StockLocation, StockItem, StockItemTracking
@ -162,56 +159,6 @@ class SerializeStockForm(HelperForm):
]
class InstallStockForm(HelperForm):
"""
Form for manually installing a stock item into another stock item
TODO: Migrate this form to the modern API forms interface
"""
part = forms.ModelChoiceField(
queryset=Part.objects.all(),
widget=forms.HiddenInput()
)
stock_item = forms.ModelChoiceField(
required=True,
queryset=StockItem.objects.filter(StockItem.IN_STOCK_FILTER),
help_text=_('Stock item to install')
)
to_install = forms.BooleanField(
widget=forms.HiddenInput(),
required=False,
)
notes = forms.CharField(
required=False,
help_text=_('Notes')
)
class Meta:
model = StockItem
fields = [
'part',
'stock_item',
# 'quantity_to_install',
'notes',
]
def clean(self):
data = super().clean()
stock_item = data.get('stock_item', None)
quantity = data.get('quantity_to_install', None)
if stock_item and quantity and quantity > stock_item.quantity:
raise ValidationError({'quantity_to_install': _('Must not exceed available quantity')})
return data
class UninstallStockForm(forms.ModelForm):
"""
Form for uninstalling a stock item which is installed in another item.


@ -391,6 +391,63 @@ class SerializeStockItemSerializer(serializers.Serializer):
)
class InstallStockItemSerializer(serializers.Serializer):
"""
Serializer for installing a stock item into a given part
"""
stock_item = serializers.PrimaryKeyRelatedField(
queryset=StockItem.objects.all(),
many=False,
required=True,
allow_null=False,
label=_('Stock Item'),
help_text=_('Select stock item to install'),
)
note = serializers.CharField(
label=_('Note'),
required=False,
allow_blank=True,
)
def validate_stock_item(self, stock_item):
"""
Validate the selected stock item
"""
if not stock_item.in_stock:
# StockItem must be in stock to be "installed"
raise ValidationError(_("Stock item is unavailable"))
# Extract the "parent" item - the item into which the stock item will be installed
parent_item = self.context['item']
parent_part = parent_item.part
if not parent_part.check_if_part_in_bom(stock_item.part):
raise ValidationError(_("Selected part is not in the Bill of Materials"))
return stock_item
def save(self):
""" Install the selected stock item into this one """
data = self.validated_data
stock_item = data['stock_item']
note = data.get('note', '')
parent_item = self.context['item']
request = self.context['request']
parent_item.installStockItem(
stock_item,
stock_item.quantity,
request.user,
note,
)
class LocationTreeSerializer(InvenTree.serializers.InvenTreeModelSerializer):
"""
Serializer for a simple tree view


@ -183,16 +183,11 @@
$('#stock-item-install').click(function() {
launchModalForm(
"{% url 'stock-item-install' item.pk %}",
{
data: {
'part': {{ item.part.pk }},
'install_item': true,
},
reload: true,
installStockItem({{ item.pk }}, {{ item.part.pk }}, {
onSuccess: function(response) {
$("#installed-table").bootstrapTable('refresh');
}
);
});
});
loadInstalledInTable(
@ -311,65 +306,6 @@
});
});
$("#test-result-table").on('click', '.button-test-add', function() {
var button = $(this);
var test_name = button.attr('pk');
constructForm('{% url "api-stock-test-result-list" %}', {
method: 'POST',
fields: {
test: {
value: test_name,
},
result: {},
value: {},
attachment: {},
notes: {},
stock_item: {
value: {{ item.pk }},
hidden: true,
}
},
title: '{% trans "Add Test Result" %}',
onSuccess: reloadTable,
});
});
$("#test-result-table").on('click', '.button-test-edit', function() {
var button = $(this);
var pk = button.attr('pk');
var url = `/api/stock/test/${pk}/`;
constructForm(url, {
fields: {
test: {},
result: {},
value: {},
attachment: {},
notes: {},
},
title: '{% trans "Edit Test Result" %}',
onSuccess: reloadTable,
});
});
$("#test-result-table").on('click', '.button-test-delete', function() {
var button = $(this);
var pk = button.attr('pk');
var url = `/api/stock/test/${pk}/`;
constructForm(url, {
method: 'DELETE',
title: '{% trans "Delete Test Result" %}',
onSuccess: reloadTable,
});
});
{% if item.child_count > 0 %}
loadStockTable($("#childs-stock-table"), {
params: {


@ -98,7 +98,9 @@
<li><a class='dropdown-item' href='#' id='stock-uninstall' title='{% trans "Uninstall stock item" %}'><span class='fas fa-unlink'></span> {% trans "Uninstall" %}</a></li>
{% else %}
{% if item.part.get_used_in %}
<li><a class='dropdown-item' href='#' id='stock-install-in' title='{% trans "Install stock item" %}'><span class='fas fa-link'></span> {% trans "Install" %}</a></li>
<!--
<li><a class='dropdown-item' href='#' id='stock-install-in' title='{% trans "Install stock item" %}'><span class='fas fa-link'></span> {% trans "Install" %}</a></li>
-->
{% endif %}
{% endif %}
</ul>
@ -442,16 +444,7 @@ $("#stock-serialize").click(function() {
$('#stock-install-in').click(function() {
launchModalForm(
"{% url 'stock-item-install' item.pk %}",
{
data: {
'part': {{ item.part.pk }},
'install_in': true,
},
reload: true,
}
);
// TODO - Launch dialog to install this item *into* another stock item
});
$('#stock-uninstall').click(function() {
@ -618,7 +611,7 @@ enableBreadcrumbTree({
{% endif %}
processNode: function(node) {
node.text = node.name;
node.href = `/stock/item/${node.pk}/`;
node.href = `/stock/location/${node.pk}/`;
return node;
}


@ -1,33 +0,0 @@
{% extends "modal_form.html" %}
{% load i18n %}
{% block pre_form_content %}
{% if install_item %}
<p>
{% trans "Install another Stock Item into this item." %}
</p>
<p>
{% trans "Stock items can only be installed if they meet the following criteria" %}:
<ul>
<li>{% trans "The Stock Item links to a Part which is in the BOM for this Stock Item" %}</li>
<li>{% trans "The Stock Item is currently in stock" %}</li>
<li>{% trans "The Stock Item is serialized and does not belong to another item" %}</li>
</ul>
</p>
{% elif install_in %}
<p>
{% trans "Install this Stock Item in another stock item." %}
</p>
<p>
{% trans "Stock items can only be installed if they meet the following criteria" %}:
<ul>
<li>{% trans "The part associated to this Stock Item belongs to another part's BOM" %}</li>
<li>{% trans "This Stock Item is serialized and does not belong to another item" %}</li>
</ul>
</p>
{% endif %}
{% endblock %}


@ -24,7 +24,6 @@ stock_item_detail_urls = [
url(r'^qr_code/', views.StockItemQRCode.as_view(), name='stock-item-qr'),
url(r'^delete_test_data/', views.StockItemDeleteTestData.as_view(), name='stock-item-delete-test-data'),
url(r'^return/', views.StockItemReturnToStock.as_view(), name='stock-item-return'),
url(r'^install/', views.StockItemInstall.as_view(), name='stock-item-install'),
url(r'^add_tracking/', views.StockItemTrackingCreate.as_view(), name='stock-tracking-create'),


@ -465,155 +465,6 @@ class StockItemQRCode(QRCodeView):
return None
class StockItemInstall(AjaxUpdateView):
"""
View for manually installing stock items into
a particular stock item.
In contrast to the StockItemUninstall view,
only a single stock item can be installed at once.
The "part" to be installed must be provided in the GET query parameters.
"""
model = StockItem
form_class = StockForms.InstallStockForm
ajax_form_title = _('Install Stock Item')
ajax_template_name = "stock/item_install.html"
part = None
def get_params(self):
""" Retrieve GET parameters """
# Look at GET params
self.part_id = self.request.GET.get('part', None)
self.install_in = self.request.GET.get('install_in', False)
self.install_item = self.request.GET.get('install_item', False)
if self.part_id is None:
# Look at POST params
self.part_id = self.request.POST.get('part', None)
try:
self.part = Part.objects.get(pk=self.part_id)
except (ValueError, Part.DoesNotExist):
self.part = None
def get_stock_items(self):
"""
Return a list of stock items suitable for displaying to the user.
Requirements:
- Items must be in stock
- Items must be in BOM of stock item
- Items must be serialized
"""
# Filter items in stock
items = StockItem.objects.filter(StockItem.IN_STOCK_FILTER)
# Filter serialized stock items
items = items.exclude(serial__isnull=True).exclude(serial__exact='')
if self.part:
# Filter for parts to install this item in
if self.install_in:
# Get parts using this part
allowed_parts = self.part.get_used_in()
# Filter
items = items.filter(part__in=allowed_parts)
# Filter for parts to install in this item
if self.install_item:
# Get all parts which can be installed into this part
allowed_parts = self.part.get_installed_part_options()
# Filter
items = items.filter(part__in=allowed_parts)
return items
def get_context_data(self, **kwargs):
""" Retrieve parameters and update context """
ctx = super().get_context_data(**kwargs)
# Get request parameters
self.get_params()
ctx.update({
'part': self.part,
'install_in': self.install_in,
'install_item': self.install_item,
})
return ctx
def get_initial(self):
initials = super().get_initial()
items = self.get_stock_items()
# If there is a single stock item available, we can use it!
if items.count() == 1:
item = items.first()
initials['stock_item'] = item.pk
if self.part:
initials['part'] = self.part
try:
# Is this stock item being installed in the other stock item?
initials['to_install'] = self.install_in or not self.install_item
except AttributeError:
pass
return initials
def get_form(self):
form = super().get_form()
form.fields['stock_item'].queryset = self.get_stock_items()
return form
def post(self, request, *args, **kwargs):
self.get_params()
form = self.get_form()
valid = form.is_valid()
if valid:
# We assume by this point that we have a valid stock_item and quantity values
data = form.cleaned_data
other_stock_item = data['stock_item']
# Quantity will always be 1 for serialized item
quantity = 1
notes = data['notes']
# Get stock item
this_stock_item = self.get_object()
if data['to_install']:
# Install this stock item into the other stock item
other_stock_item.installStockItem(this_stock_item, quantity, request.user, notes)
else:
# Install the other stock item into this one
this_stock_item.installStockItem(other_stock_item, quantity, request.user, notes)
data = {
'form_valid': valid,
}
return self.renderJsonResponse(request, form, data=data)
class StockItemUninstall(AjaxView, FormMixin):
"""
View for uninstalling one or more StockItems,


@ -40,12 +40,6 @@ function constructBomUploadTable(data, options={}) {
function constructRow(row, idx, fields) {
// Construct an individual row from the provided data
var errors = {};
if (data.errors && data.errors.length > idx) {
errors = data.errors[idx];
}
var field_options = {
hideLabels: true,
hideClearButton: true,
@ -60,7 +54,7 @@ function constructBomUploadTable(data, options={}) {
return `Cannot render field '${field_name}`;
}
field.value = row[field_name];
field.value = row.data[field_name];
return constructField(`items_${field_name}_${idx}`, field, field_options);
@ -99,19 +93,19 @@ function constructBomUploadTable(data, options={}) {
$('#bom-import-table tbody').append(html);
// Handle any errors raised by initial data import
if (errors.part) {
addFieldErrorMessage(`items_sub_part_${idx}`, errors.part);
if (row.data.errors.part) {
addFieldErrorMessage(`items_sub_part_${idx}`, row.data.errors.part);
}
if (errors.quantity) {
addFieldErrorMessage(`items_quantity_${idx}`, errors.quantity);
if (row.data.errors.quantity) {
addFieldErrorMessage(`items_quantity_${idx}`, row.data.errors.quantity);
}
// Initialize the "part" selector for this row
initializeRelatedField(
{
name: `items_sub_part_${idx}`,
value: row.part,
value: row.data.part,
api_url: '{% url "api-part-list" %}',
filters: {
component: true,
@ -140,7 +134,12 @@ function constructBomUploadTable(data, options={}) {
});
// Prettify the original import data
var pretty = JSON.stringify(row, undefined, 4);
var pretty = JSON.stringify(
{
columns: data.columns,
row: row.original,
}, undefined, 4
);
var html = `
<div class='alert alert-block'>
@ -176,7 +175,7 @@ function submitBomTable(part_id, options={}) {
var idx_values = [];
var url = '{% url "api-bom-upload" %}';
var url = '{% url "api-bom-import-submit" %}';
$('.bom-import-row').each(function() {
var idx = $(this).attr('idx');


@ -31,6 +31,7 @@
setFormInputPlaceholder,
setFormGroupVisibility,
showFormInput,
selectImportFields,
*/
/**
@ -895,8 +896,8 @@ function getFormFieldValue(name, field={}, options={}) {
// Find the HTML element
var el = getFormFieldElement(name, options);
if (!el) {
console.log(`ERROR: getFormFieldValue could not locate field '{name}'`);
if (!el.exists()) {
console.log(`ERROR: getFormFieldValue could not locate field '${name}'`);
return null;
}
@ -1219,7 +1220,7 @@ function addFieldErrorMessage(name, error_text, error_idx=0, options={}) {
field_dom.append(error_html);
} else {
console.log(`WARNING: addFieldErrorMessage could not locate field '${field_name}`);
console.log(`WARNING: addFieldErrorMessage could not locate field '${field_name}'`);
}
}
@ -2080,7 +2081,7 @@ function constructLabel(name, parameters) {
* - parameters: Field parameters returned by the OPTIONS method
*
*/
function constructInput(name, parameters, options) {
function constructInput(name, parameters, options={}) {
var html = '';
@ -2422,3 +2423,117 @@ function constructHelpText(name, parameters) {
return html;
}
/*
* Construct a dialog to select import fields
*/
function selectImportFields(url, data={}, options={}) {
if (!data.model_fields) {
console.log(`WARNING: selectImportFields is missing 'model_fields'`);
return;
}
if (!data.file_fields) {
console.log(`WARNING: selectImportFields is missing 'file_fields'`);
return;
}
var choices = [];
// Add an "empty" value
choices.push({
value: '',
display_name: '-----',
});
for (const [name, field] of Object.entries(data.model_fields)) {
choices.push({
value: name,
display_name: field.label || name,
});
}
var rows = '';
var field_names = Object.keys(data.file_fields);
for (var idx = 0; idx < field_names.length; idx++) {
var field_name = field_names[idx];
var choice_input = constructInput(
`column_${idx}`,
{
type: 'choice',
label: field_name,
value: data.file_fields[field_name].value,
choices: choices,
}
);
rows += `<tr><td><em>${field_name}</em></td><td>${choice_input}</td></tr>`;
}
var headers = `<tr><th>{% trans "File Column" %}</th><th>{% trans "Field Name" %}</th></tr>`;
var html = '';
if (options.preamble) {
html += options.preamble;
}
html += `<table class='table table-condensed'>${headers}${rows}</table>`;
constructForm(url, {
method: 'POST',
title: '{% trans "Select Columns" %}',
fields: {},
preFormContent: html,
onSubmit: function(fields, opts) {
var columns = [];
for (var idx = 0; idx < field_names.length; idx++) {
columns.push(getFormFieldValue(`column_${idx}`, {}, opts));
}
$(opts.modal).find('#modal-progress-spinner').show();
inventreePut(
opts.url,
{
columns: columns,
rows: data.rows,
},
{
method: 'POST',
success: function(response) {
handleFormSuccess(response, opts);
if (options.success) {
options.success(response);
}
},
error: function(xhr) {
$(opts.modal).find('#modal-progress-spinner').hide();
switch (xhr.status) {
case 400:
handleFormErrors(xhr.responseJSON, fields, opts);
break;
default:
$(opts.modal).modal('hide');
console.log(`upload error at ${opts.url}`);
showApiError(xhr, opts.url);
break;
}
}
}
);
},
});
}


@ -893,6 +893,9 @@ function handleModalForm(url, options) {
// Re-enable the modal
modalEnable(modal, true);
if ('form_valid' in response) {
// Get visibility option of error message
var hideErrorMessage = (options.hideErrorMessage === undefined) ? true : options.hideErrorMessage;
// Form data was validated correctly
if (response.form_valid) {
$(modal).modal('hide');
@ -901,7 +904,7 @@ function handleModalForm(url, options) {
// Form was returned, invalid!
// Disable error message with option or response
if (!options.hideErrorMessage && !response.hideErrorMessage) {
if (!hideErrorMessage && !response.hideErrorMessage) {
var warningDiv = $(modal).find('#form-validation-warning');
warningDiv.css('display', 'block');
}


@ -46,6 +46,7 @@
editStockLocation,
exportStock,
findStockItemBySerialNumber,
installStockItem,
loadInstalledInTable,
loadStockAllocationTable,
loadStockLocationTable,
@ -1227,14 +1228,42 @@ function formatDate(row) {
return html;
}
/*
* Load StockItemTestResult table
*/
function loadStockTestResultsTable(table, options) {
/*
* Load StockItemTestResult table
*/
// Setup filters for the table
var filterTarget = options.filterTarget || '#filter-list-stocktests';
var filterKey = options.filterKey || options.name || 'stocktests';
var filters = loadTableFilters(filterKey);
var params = {
part: options.part,
};
var original = {};
for (var k in params) {
original[k] = params[k];
filters[k] = params[k];
}
setupFilterList(filterKey, table, filterTarget);
function makeButtons(row, grouped) {
// Helper function for rendering buttons
var html = `<div class='btn-group float-right' role='group'>`;
if (row.requires_attachment == false && row.requires_value == false && !row.result) {
// Enable a "quick tick" option for this test result
html += makeIconButton('fa-check-circle icon-green', 'button-test-tick', row.test_name, '{% trans "Pass test" %}');
}
html += makeIconButton('fa-plus icon-green', 'button-test-add', row.test_name, '{% trans "Add test result" %}');
if (!grouped && row.result != null) {
@ -1258,14 +1287,13 @@ function loadStockTestResultsTable(table, options) {
rootParentId: parent_node,
parentIdField: 'parent',
idField: 'pk',
uniqueId: 'key',
uniqueId: 'pk',
treeShowField: 'test_name',
formatNoMatches: function() {
return '{% trans "No test results found" %}';
},
queryParams: {
part: options.part,
},
queryParams: filters,
original: original,
onPostBody: function() {
table.treegrid({
treeColumn: 0,
@ -1401,6 +1429,102 @@ function loadStockTestResultsTable(table, options) {
);
}
});
/* Register button callbacks */
function reloadTestTable(response) {
$(table).bootstrapTable('refresh');
}
// "tick" a test result
$(table).on('click', '.button-test-tick', function() {
var button = $(this);
var test_name = button.attr('pk');
inventreePut(
'{% url "api-stock-test-result-list" %}',
{
test: test_name,
result: true,
stock_item: options.stock_item,
},
{
method: 'POST',
success: reloadTestTable,
}
);
});
// Add a test result
$(table).on('click', '.button-test-add', function() {
var button = $(this);
var test_name = button.attr('pk');
constructForm('{% url "api-stock-test-result-list" %}', {
method: 'POST',
fields: {
test: {
value: test_name,
},
result: {},
value: {},
attachment: {},
notes: {},
stock_item: {
value: options.stock_item,
hidden: true,
}
},
title: '{% trans "Add Test Result" %}',
onSuccess: reloadTestTable,
});
});
// Edit a test result
$(table).on('click', '.button-test-edit', function() {
var button = $(this);
var pk = button.attr('pk');
var url = `/api/stock/test/${pk}/`;
constructForm(url, {
fields: {
test: {},
result: {},
value: {},
attachment: {},
notes: {},
},
title: '{% trans "Edit Test Result" %}',
onSuccess: reloadTestTable,
});
});
// Delete a test result
$(table).on('click', '.button-test-delete', function() {
var button = $(this);
var pk = button.attr('pk');
var url = `/api/stock/test/${pk}/`;
var row = $(table).bootstrapTable('getRowByUniqueId', pk);
var html = `
<div class='alert alert-block alert-danger'>
<strong>{% trans "Delete test result" %}:</strong> ${row.test_name || row.test || row.key}
</div>`;
constructForm(url, {
method: 'DELETE',
title: '{% trans "Delete Test Result" %}',
onSuccess: reloadTestTable,
preFormContent: html,
});
});
}
@ -2837,3 +2961,67 @@ function loadInstalledInTable(table, options) {
}
});
}
/*
* Launch a dialog to install a stock item into another stock item
*/
function installStockItem(stock_item_id, part_id, options={}) {
var html = `
<div class='alert alert-block alert-info'>
<strong>{% trans "Install another stock item into this item" %}</strong><br>
{% trans "Stock items can only be installed if they meet the following criteria" %}:<br>
<ul>
<li>{% trans "The Stock Item links to a Part which is the BOM for this Stock Item" %}</li>
<li>{% trans "The Stock Item is currently available in stock" %}</li>
<li>{% trans "The Stock Item is serialized and does not belong to another item" %}</li>
</ul>
</div>`;
constructForm(
`/api/stock/${stock_item_id}/install/`,
{
method: 'POST',
fields: {
part: {
type: 'related field',
required: 'true',
label: '{% trans "Part" %}',
help_text: '{% trans "Select part to install" %}',
model: 'part',
api_url: '{% url "api-part-list" %}',
auto_fill: true,
filters: {
trackable: true,
in_bom_for: part_id,
}
},
stock_item: {
filters: {
part_detail: true,
in_stock: true,
serialized: true,
},
adjustFilters: function(filters, opts) {
var part = getFormFieldValue('part', {}, opts);
if (part) {
filters.part = part;
}
return filters;
}
}
},
confirm: true,
title: '{% trans "Install Stock Item" %}',
preFormContent: html,
onSuccess: function(response) {
if (options.onSuccess) {
options.onSuccess(response);
}
}
}
);
}


@ -265,12 +265,7 @@ function getAvailableTableFilters(tableKey) {
// Filters for the 'stock test' table
if (tableKey == 'stocktests') {
return {
result: {
type: 'bool',
title: '{% trans "Test result" %}',
},
};
return {};
}
// Filters for the 'part test template' table