Increased error checking when uploading BOM data

Oliver 2022-02-09 23:02:09 +11:00
parent 2af617e92b
commit 001437e083
2 changed files with 76 additions and 8 deletions
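In short: the serializer change renames the per-row error dict to row_error, rejects parts that are not flagged as components, and validates the quantity column; the test change extends the fixture with component parts and adds a test for invalid data. The errors list stays index-aligned with the rows list, which is what the new test asserts against (errors[0], errors[5], errors[-1]). A minimal, runnable sketch of that accumulation pattern with stand-in data and deliberately simplified checks (not the serializer's actual code):

    raw_rows = [
        {'part_id': 1, 'quantity': '0'},        # fails: quantity not greater than zero
        {'part_id': 2, 'quantity': '4'},        # passes every check
        {'part_id': 2, 'quantity': 'invalid'},  # fails: duplicate part, bad quantity
    ]

    rows, errors, seen = [], [], set()

    for row in raw_rows:
        row_error = {}

        if row['part_id'] in seen:
            row_error['part'] = 'Duplicate part selected'
        else:
            seen.add(row['part_id'])

        try:
            if float(row['quantity']) <= 0:
                row_error['quantity'] = 'Quantity must be greater than zero'
        except ValueError:
            row_error['quantity'] = 'Invalid quantity'

        rows.append(row)
        errors.append(row_error)   # errors[i] always describes rows[i]

    assert errors[1] == {}         # an empty dict means the row passed every check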

View File

@@ -878,7 +878,7 @@ class BomExtractSerializer(serializers.Serializer):
         for row in self.dataset.dict:

-            error = {}
+            row_error = {}

             """
             If the "level" column is specified, and this is not a top-level BOM item, ignore the row!
             """
@@ -939,26 +939,45 @@ class BomExtractSerializer(serializers.Serializer):
                     part = queryset.first()
                 else:
                     # Multiple matches!
-                    error['part'] = _('Multiple matching parts found')
+                    row_error['part'] = _('Multiple matching parts found')

             if part is None:
-                if 'part' not in error:
-                    error['part'] = _('No matching part found')
+                if 'part' not in row_error:
+                    row_error['part'] = _('No matching part found')
             else:
                 if part.pk in found_parts:
-                    error['part'] = _('Duplicate part selected')
-                else:
+                    row_error['part'] = _("Duplicate part selected")
+
+                elif not part.component:
+                    row_error['part'] = _('Part is not designated as a component')
+
+                else:
                     found_parts.add(part.pk)

             row['part'] = part.pk if part is not None else None

+            """
+            Read out the 'quantity' column - check that it is valid
+            """
+            quantity = self.find_matching_data(row, 'quantity', self.dataset.headers)
+
+            if quantity is None:
+                row_error['quantity'] = _('Quantity not provided')
+            else:
+                try:
+                    quantity = Decimal(quantity)
+
+                    if quantity <= 0:
+                        row_error['quantity'] = _('Quantity must be greater than zero')
+                except:
+                    row_error['quantity'] = _('Invalid quantity')
+
             # For each "optional" column, ensure the column names are allocated correctly
             for field_name in self.OPTIONAL_COLUMNS:
                 if field_name not in row:
                     row[field_name] = self.find_matching_data(row, field_name, self.dataset.headers)

             rows.append(row)
-            errors.append(error)
+            errors.append(row_error)

         return {
             'rows': rows,
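As a side note, the quantity check added above hinges on Decimal parsing. The sketch below restates it in isolation; check_quantity is a hypothetical helper (the serializer performs the same checks inline and uses a bare except rather than naming the exception types):

    from decimal import Decimal, InvalidOperation

    def check_quantity(value):
        # Mirrors the per-row quantity validation added in this commit (illustrative only)
        if value is None:
            return 'Quantity not provided'
        try:
            if Decimal(value) <= 0:
                return 'Quantity must be greater than zero'
        except (InvalidOperation, TypeError, ValueError):
            # Decimal('invalid') raises InvalidOperation, which the bare except also catches
            return 'Invalid quantity'
        return None

    assert check_quantity('3') is None
    assert check_quantity('0') == 'Quantity must be greater than zero'
    assert check_quantity('invalid') == 'Invalid quantity'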

View File

@@ -29,6 +29,15 @@ class BomUploadTest(InvenTreeAPITestCase):
             name='Assembly',
             description='An assembled part',
             assembly=True,
+            component=False,
         )

+        for i in range(10):
+            Part.objects.create(
+                name=f"Component {i}",
+                description="A subcomponent that can be used in a BOM",
+                component=True,
+                assembly=False,
+            )
+
         self.url = reverse('api-bom-extract')
@@ -164,3 +173,43 @@ class BomUploadTest(InvenTreeAPITestCase):
         )

         self.assertIn('No data rows found', str(response.data))
+
+    def test_invalid_data(self):
+        """
+        Upload data which contains errors
+        """
+
+        dataset = tablib.Dataset()
+
+        # Only these headers are strictly necessary
+        dataset.headers = ['part_id', 'quantity']
+
+        components = Part.objects.filter(component=True)
+
+        for idx, cmp in enumerate(components):
+
+            if idx == 5:
+                cmp.component = False
+                cmp.save()
+
+            dataset.append([cmp.pk, idx])
+
+        # Add a duplicate part too
+        dataset.append([components.first().pk, 'invalid'])
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            content_type='text/csv',
+            expected_code=201
+        )
+
+        errors = response.data['errors']
+
+        self.assertIn('Quantity must be greater than zero', str(errors[0]))
+        self.assertIn('Part is not designated as a component', str(errors[5]))
+        self.assertIn('Duplicate part selected', str(errors[-1]))
+        self.assertIn('Invalid quantity', str(errors[-1]))
+
+        for idx, row in enumerate(response.data['rows'][:-1]):
+            self.assertEqual(str(row['part']), str(components[idx].pk))
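For reference, the CSV payload that test_invalid_data uploads can be reproduced with tablib alone. The part ids below are placeholders; the comments note which error each row is expected to trigger:

    import tablib

    dataset = tablib.Dataset(headers=['part_id', 'quantity'])
    dataset.append([1, 0])          # zero quantity -> 'Quantity must be greater than zero'
    dataset.append([2, 5])          # a part not flagged as a component -> component error
    dataset.append([1, 'invalid'])  # duplicate part id and a non-numeric quantity

    payload = bytes(dataset.csv, 'utf8')  # raw CSV bytes, as handed to the upload endpoint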