Refactoring API endpoints

- Improved URL naming scheme
Oliver 2022-02-16 16:57:27 +11:00
parent d7adb6959d
commit f399f4fa34
8 changed files with 295 additions and 274 deletions

View File

@@ -59,7 +59,7 @@ class DataImportMixin(object):
def get_import_fields(cls):
"""
Return all available import fields
Where information on a particular field is not explicitly provided,
introspect the base model to (attempt to) find that information.
@@ -67,7 +67,7 @@ class DataImportMixin(object):
fields = cls.IMPORT_FIELDS
for name, field in fields.items():
# Attempt to extract base field information from the model
base_field = None
@@ -79,7 +79,7 @@ class DataImportMixin(object):
if base_field:
if 'label' not in field:
field['label'] = base_field.verbose_name
if 'help_text' not in field:
field['help_text'] = base_field.help_text
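Pieced together, those hunks give the mixin the following shape. A minimal self-contained sketch, assuming a Django model; the dict-copying and setdefault-style merging are illustrative rather than the verbatim committed code:

class DataImportMixin(object):
    # Implementing class provides a dict of importable fields
    IMPORT_FIELDS = {}

    @classmethod
    def get_import_fields(cls):
        fields = {}
        for name, field in cls.IMPORT_FIELDS.items():
            field = dict(field)  # copy, so the class-level definition is not mutated
            try:
                base_field = cls._meta.get_field(name)  # Django model introspection
            except Exception:
                base_field = None
            if base_field:
                # Fall back to model metadata where the definition is silent
                field.setdefault('label', base_field.verbose_name)
                field.setdefault('help_text', base_field.help_text)
            fields[name] = field
        return fields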

View File

@@ -411,7 +411,7 @@ class DataFileUploadSerializer(serializers.Serializer):
return data_file
def match_column(self, column_name, field_names):
def match_column(self, column_name, field_names, exact=False):
"""
Attempt to match a column name (from the file) to a field (defined in the model)
@@ -432,12 +432,15 @@ class DataFileUploadSerializer(serializers.Serializer):
if field_name.lower() == column_name_lower:
return field_name
# TODO: Fuzzy pattern matching
if exact:
# Exhausted all available 'exact' matches
return None
# TODO: Fuzzy pattern matching for column names
# No matches found
return None
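A standalone sketch of the two-pass idea behind the new exact argument: exact matching first, fuzzy matching only as a fallback. The fuzzy branch here is purely illustrative, since the commit leaves it as a TODO:

def match_column(column_name, field_names, exact=False):
    column_name_lower = column_name.lower().strip()

    # Pass 1: exact (case-insensitive) match
    for field_name in field_names:
        if field_name.lower() == column_name_lower:
            return field_name

    if exact:
        return None

    # Pass 2 (hypothetical): tolerate spaces in place of underscores
    for field_name in field_names:
        if column_name_lower.replace(' ', '_') == field_name.lower():
            return field_name

    return None

# match_column('Part IPN', ['part_ipn', 'quantity'])             -> 'part_ipn'
# match_column('Part IPN', ['part_ipn', 'quantity'], exact=True) -> None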
def extract_data(self):
"""
Returns dataset extracted from the file
@@ -465,7 +468,7 @@ class DataFileUploadSerializer(serializers.Serializer):
column = {}
# Attempt to "match" file columns to model fields
match = self.match_column(header, model_field_names)
match = self.match_column(header, model_field_names, exact=True)
if match is not None and match not in matched_columns:
matched_columns.add(match)
@@ -482,13 +485,16 @@ class DataFileUploadSerializer(serializers.Serializer):
'filename': self.filename,
}
def save(self):
...
class DataFileExtractSerializer(serializers.Serializer):
"""
Generic serializer for extracting data from an imported dataset.
- User provides an array of matched headers
- User provides an array of raw data rows
"""
# Implementing class should register a target model (database model) to be used for import
@@ -500,7 +506,7 @@ class DataFileExtractSerializer(serializers.Serializer):
'rows',
]
# Mapping of columns
columns = serializers.ListField(
child=serializers.CharField(
allow_blank=True,
@@ -530,16 +536,69 @@ class DataFileExtractSerializer(serializers.Serializer):
self.validate_extracted_columns()
return self.extract_data(data)
def extract_data(self, data):
"""
Extract row data based on the provided fields.
Returns an array of mapped column:value pairs
"""
return data
@property
def data(self):
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except:
model_fields = {}
rows = []
for row in self.rows:
"""
Optionally pre-process each row, before sending back to the client
"""
processed_row = self.process_row(self.row_to_dict(row))
if processed_row:
rows.append({
"original": row,
"data": processed_row,
})
return {
'fields': model_fields,
'columns': self.columns,
'rows': rows,
}
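For a three-column file where only two headers are matched, the data property above returns a payload shaped roughly like this (values are hypothetical):

{
    'fields': {'part': {'label': 'Part'}, 'quantity': {'required': True}},
    'columns': ['part', 'quantity', ''],
    'rows': [
        {
            'original': ['101', '2', 'red'],
            'data': {'errors': {}, 'part': '101', 'quantity': '2'},
        },
    ],
}

The third cell ('red') survives only in 'original': its column was never matched, so row_to_dict (below) drops it from the mapped data.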
def process_row(self, row):
"""
Process a 'row' of data, which is a mapped column:value dict
Returns either a mapped column:value dict, or None.
If the function returns None, the row is ignored!
"""
# Default implementation simply returns the original row data
return row
def row_to_dict(self, row):
"""
Convert a "row" to a named data dict
"""
row_dict = {
'errors': {},
}
for idx, value in enumerate(row):
if idx < len(self.columns):
col = self.columns[idx]
if col:
row_dict[col] = value
return row_dict
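A quick hypothetical illustration of that mapping, assuming self.columns = ['part', '', 'quantity']:

# row_to_dict(['ABC-001', 'ignored', '5'])
# -> {'errors': {}, 'part': 'ABC-001', 'quantity': '5'}
# The second cell is discarded because its column was never matched to a field.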
def validate_extracted_columns(self):
"""
Perform custom validation of header mapping.
@@ -561,7 +620,7 @@ class DataFileExtractSerializer(serializers.Serializer):
if required:
if name not in self.columns:
raise serializers.ValidationError(_("Missing required column") + f": '{name}'")
for col in self.columns:
if not col:
@@ -577,4 +636,4 @@ class DataFileExtractSerializer(serializers.Serializer):
"""
No "save" action for this serializer
"""
...
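Concrete importers only need to bind a TARGET_MODEL and, where useful, override the hooks above. A minimal hypothetical subclass:

class MyModelExtractSerializer(DataFileExtractSerializer):
    TARGET_MODEL = MyModel  # any model that uses DataImportMixin

    def process_row(self, row):
        # Veto rows without a usable name; everything else passes through
        if not row.get('name'):
            return None
        return row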

View File

@@ -1533,16 +1533,7 @@ class BomList(generics.ListCreateAPIView):
]
class BomExtract(generics.CreateAPIView):
"""
API endpoint for extracting BOM data from a BOM file.
"""
queryset = Part.objects.none()
serializer_class = part_serializers.BomFileExtractSerializer
class BomUpload(generics.CreateAPIView):
class BomImportUpload(generics.CreateAPIView):
"""
API endpoint for uploading a complete Bill of Materials.
@@ -1550,7 +1541,7 @@ class BomUpload(generics.CreateAPIView):
"""
queryset = Part.objects.all()
serializer_class = part_serializers.BomFileUploadSerializer
serializer_class = part_serializers.BomImportUploadSerializer
def create(self, request, *args, **kwargs):
"""
@@ -1567,6 +1558,23 @@ class BomUpload(generics.CreateAPIView):
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class BomImportExtract(generics.CreateAPIView):
"""
API endpoint for extracting BOM data from a BOM file.
"""
queryset = Part.objects.none()
serializer_class = part_serializers.BomImportExtractSerializer
class BomImportSubmit(generics.CreateAPIView):
"""
API endpoint for submitting BOM data from a BOM file
"""
queryset = BomItem.objects.none()
serializer_class = part_serializers.BomImportSubmitSerializer
class BomDetail(generics.RetrieveUpdateDestroyAPIView):
""" API endpoint for detail view of a single BomItem object """
@@ -1720,9 +1728,10 @@ bom_api_urls = [
url(r'^.*$', BomDetail.as_view(), name='api-bom-item-detail'),
])),
url(r'^upload/', BomUpload.as_view(), name='api-bom-upload'),
url(r'^extract/', BomExtract.as_view(), name='api-bom-extract'),
# API endpoint URLs for importing BOM data
url(r'^import/upload/', BomImportUpload.as_view(), name='api-bom-import-upload'),
url(r'^import/extract/', BomImportExtract.as_view(), name='api-bom-import-extract'),
url(r'^import/submit/', BomImportSubmit.as_view(), name='api-bom-import-submit'),
# Catch-all
url(r'^.*$', BomList.as_view(), name='api-bom-list'),
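The renamed URLs make the import an explicit three-step workflow: upload the raw file, confirm the column mapping, then submit the validated rows. A rough client-side sketch using requests (the submit payload key is an assumption; authentication is omitted):

import requests

BASE = 'http://localhost:8000/api/bom'  # hypothetical server

# Step 1: upload the raw BOM file
with open('bom.csv', 'rb') as f:
    upload = requests.post(f'{BASE}/import/upload/', files={'data_file': f}).json()

# Step 2: confirm which file column maps to which model field
extracted = requests.post(f'{BASE}/import/extract/', json={
    'columns': ['part', 'quantity'],
    'rows': upload['rows'],
}).json()

# Step 3: submit the validated BOM items
requests.post(f'{BASE}/import/submit/', json={'items': extracted['rows']})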

View File

@@ -2573,12 +2573,12 @@ class BomItem(models.Model, DataImportMixin):
'quantity': {
'required': True
},
'optional': {},
'reference': {},
'overage': {},
'note': {},
'inherited': {},
'allow_variants': {},
'inherited': {},
'optional': {},
'note': {},
'part': {
'label': _('Part'),
'help_text': _('Part ID or part name'),
@@ -2594,6 +2594,10 @@ class BomItem(models.Model, DataImportMixin):
'part_ipn': {
'label': _('Part IPN'),
'help_text': _('Part IPN value'),
},
'level': {
'label': _('Level'),
'help_text': _('BOM level'),
}
}
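With these definitions in place, BomItem.get_import_fields() merges each entry with model metadata via the mixin shown earlier; roughly (merged values are illustrative):

# BomItem.get_import_fields()['quantity']
# -> {'required': True, 'label': 'Quantity', 'help_text': ...}
# 'level' and 'part_ipn' keep their explicit labels, since those are provided.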

View File

@@ -4,8 +4,6 @@ JSON serializers for Part app
import imghdr
from decimal import Decimal
import os
import tablib
from django.urls import reverse_lazy
from django.db import models, transaction
@@ -711,223 +709,100 @@ class PartCopyBOMSerializer(serializers.Serializer):
)
class BomFileUploadSerializer(DataFileUploadSerializer):
class BomImportUploadSerializer(DataFileUploadSerializer):
"""
Serializer for uploading a file and extracting data from it.
Note: 2022-02-04 - This will probably need a *serious* refactor in the future
When parsing the file, the following things happen:
a) Check file format and validity
b) Look for "required" fields
c) Look for "part" fields - used to "infer" part
Once the file itself has been validated, we iterate through each data row:
- If the "level" column is provided, ignore anything below level 1
- Try to "guess" the part based on part_id / part_name / part_ipn
- Extract other fields as required
"""
TARGET_MODEL = BomItem
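The "level" rule from the docstring reduces to a small predicate; a hypothetical standalone restatement (process_row further below is the real implementation):

def keep_row(row):
    level = row.get('level')
    if level is None:
        return True  # no level column: keep every row
    try:
        return int(level) == 1  # only top-level BOM lines survive
    except (TypeError, ValueError):
        return True  # unparseable level values are not fatal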
def find_matching_data(self, row, col_name, columns):
"""
Extract data from the row, based on the "expected" column name
"""
col_name = self.find_matching_column(col_name, columns)
return row.get(col_name, None)
"""
for header in self.REQUIRED_COLUMNS:
match = self.find_matching_column(header, self.dataset.headers)
if match is None:
raise serializers.ValidationError(_("Missing required column") + f": '{header}'")
part_column_matches = {}
part_match = False
for col in self.PART_COLUMNS:
col_match = self.find_matching_column(col, self.dataset.headers)
part_column_matches[col] = col_match
if col_match is not None:
part_match = True
if not part_match:
raise serializers.ValidationError(_("No part column found"))
if len(self.dataset) == 0:
raise serializers.ValidationError(_("No data rows found"))
return bom_file
"""
def dextract_data(self):  # presumably renamed from 'extract_data' to disable this legacy path
rows = []
errors = []
found_parts = set()
headers = self.dataset.headers
level_column = self.find_matching_column('level', headers)
for row in self.dataset.dict:
row_error = {}
# If the "level" column is specified, and this is not a top-level BOM item, ignore the row!
if level_column is not None:
level = row.get('level', None)
if level is not None:
try:
level = int(level)
if level != 1:
continue
except:
pass
"""
Next, we try to "guess" the part, based on the provided data.
A) If the part_id is supplied, use that!
B) If the part name and/or part_ipn are supplied, maybe we can use those?
"""
part_id = self.find_matching_data(row, 'part_id', headers)
part_name = self.find_matching_data(row, 'part_name', headers)
part_ipn = self.find_matching_data(row, 'part_ipn', headers)
part = None
if part_id is not None:
try:
part = Part.objects.get(pk=part_id)
except (ValueError, Part.DoesNotExist):
pass
# Optionally, specify using field "part"
if part is None:
pk = self.find_matching_data(row, 'part', headers)
if pk is not None:
try:
part = Part.objects.get(pk=pk)
except (ValueError, Part.DoesNotExist):
pass
if part is None:
if part_name or part_ipn:
queryset = Part.objects.all()
if part_name:
queryset = queryset.filter(name=part_name)
if part_ipn:
queryset = queryset.filter(IPN=part_ipn)
# Only if we have a single direct match
if queryset.exists():
if queryset.count() == 1:
part = queryset.first()
else:
# Multiple matches!
row_error['part'] = _('Multiple matching parts found')
if part is None:
if 'part' not in row_error:
row_error['part'] = _('No matching part found')
else:
if part.pk in found_parts:
row_error['part'] = _("Duplicate part selected")
elif not part.component:
row_error['part'] = _('Part is not designated as a component')
found_parts.add(part.pk)
row['part'] = part.pk if part is not None else None
"""
Read out the 'quantity' column - check that it is valid
"""
quantity = self.find_matching_data(row, 'quantity', self.dataset.headers)
# Ensure quantity field is provided
row['quantity'] = quantity
if quantity is None:
row_error['quantity'] = _('Quantity not provided')
else:
try:
quantity = Decimal(quantity)
if quantity <= 0:
row_error['quantity'] = _('Quantity must be greater than zero')
except:
row_error['quantity'] = _('Invalid quantity')
# For each "optional" column, ensure the column names are allocated correctly
for field_name in self.OPTIONAL_COLUMNS:
if field_name not in row:
row[field_name] = self.find_matching_data(row, field_name, self.dataset.headers)
rows.append(row)
errors.append(row_error)
return {
'rows': rows,
'errors': errors,
'headers': headers,
'filename': self.filename,
}
"""
part = serializers.PrimaryKeyRelatedField(queryset=Part.objects.filter(assembly=True), required=True)
clear_existing = serializers.BooleanField(
label=_("Clear Existing BOM"),
help_text=_("Delete existing BOM data first"),
)
"""
def save(self):
...
"""
data = self.validated_data
master_part = data['part']
clear_existing = data['clear_existing']
if clear_existing:
# Remove all existing BOM items
master_part.bom_items.all().delete()
"""
class BomFileExtractSerializer(DataFileExtractSerializer):
"""
"""
TARGET_MODEL = BomItem
class BomUploadSerializer(serializers.Serializer):
class BomImportExtractSerializer(DataFileExtractSerializer):
"""
"""
TARGET_MODEL = BomItem
def validate_extracted_columns(self):
super().validate_extracted_columns()
part_columns = ['part', 'part_name', 'part_ipn', 'part_id']
if not any([col in self.columns for col in part_columns]):
# At least one part column is required!
raise serializers.ValidationError(_("No part column specified"))
def process_row(self, row):
# Skip any rows which are at a lower "level"
level = row.get('level', None)
if level is not None:
try:
level = int(level)
if level != 1:
# Skip this row
return None
except:
pass
# Attempt to extract a valid part based on the provided data
part_id = row.get('part_id', row.get('part', None))
part_name = row.get('part_name', row.get('part', None))
part_ipn = row.get('part_ipn', None)
part = None
if part_id is not None:
try:
part = Part.objects.get(pk=part_id)
except (ValueError, Part.DoesNotExist):
pass
# No direct match, where else can we look?
if part is None:
if part_name or part_ipn:
queryset = Part.objects.all()
if part_name:
queryset = queryset.filter(name=part_name)
if part_ipn:
queryset = queryset.filter(IPN=part_ipn)
if queryset.exists():
if queryset.count() == 1:
part = queryset.first()
else:
row['errors']['part'] = _('Multiple matching parts found')
if part is None:
row['errors']['part'] = _('No matching part found')
else:
if not part.component:
row['errors']['part'] = _('Part is not designated as a component')
# Update the 'part' value in the row
row['part'] = part.pk if part is not None else None
# Check the provided 'quantity' value
quantity = row.get('quantity', None)
if quantity is None:
row['errors']['quantity'] = _('Quantity not provided')
else:
try:
quantity = Decimal(quantity)
if quantity <= 0:
row['errors']['quantity'] = _('Quantity must be greater than zero')
except:
row['errors']['quantity'] = _('Invalid quantity')
return row
class BomImportSubmitSerializer(serializers.Serializer):
"""
Serializer for uploading a BOM against a specified part.

View File

@@ -77,15 +77,10 @@ $('#bom-template-download').click(function() {
$('#bom-upload').click(function() {
constructForm('{% url "api-bom-extract" %}', {
constructForm('{% url "api-bom-import-upload" %}', {
method: 'POST',
fields: {
bom_file: {},
part: {
value: {{ part.pk }},
hidden: true,
},
clear_existing: {},
data_file: {},
},
title: '{% trans "Upload BOM File" %}',
onSuccess: function(response) {
@@ -96,12 +91,92 @@ $('#bom-upload').click(function() {
// Disable the "submit" button
$('#bom-submit').show();
constructBomUploadTable(response);
var fields = {};
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
var choices = [];
// Add an "empty" value
choices.push({
value: '',
display_name: '-----',
});
for (const [name, field] of Object.entries(response.model_fields)) {
choices.push({
value: name,
display_name: field.label || name,
});
}
var field_names = Object.keys(response.file_fields);
for (var idx = 0; idx < field_names.length; idx++) {
var field_name = field_names[idx];
// Construct a new field
fields[`column_${idx}`] = {
type: 'choice',
label: field_name,
value: response.file_fields[field_name].value,
choices: choices,
inline: true,
};
}
constructForm('{% url "api-bom-import-extract" %}', {
method: 'POST',
title: '{% trans "Select BOM Columns" %}',
fields: fields,
onSubmit: function(fields, opts) {
var columns = [];
for (var idx = 0; idx < field_names.length; idx++) {
columns.push(
getFormFieldValue(`column_${idx}`, {}, {})
);
}
$(opts.modal).find('#modal-progress-spinner').show();
inventreePut(
opts.url,
{
columns: columns,
rows: response.rows,
},
{
method: 'POST',
success: function(r) {
handleFormSuccess(r, opts);
constructBomUploadTable(r);
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
});
});
},
error: function(xhr) {
$(opts.modal).find('#modal-progress-spinner').hide();
switch (xhr.status) {
case 400:
handleFormErrors(xhr.responseJSON, fields, opts);
break;
default:
$(opts.modal).modal('hide');
console.log(`upload error at ${opts.url}`);
showApiError(xhr, opts.url);
break;
}
}
}
);
},
});
}
});

View File

@@ -40,12 +40,6 @@ function constructBomUploadTable(data, options={}) {
function constructRow(row, idx, fields) {
// Construct an individual row from the provided data
var errors = {};
if (data.errors && data.errors.length > idx) {
errors = data.errors[idx];
}
var field_options = {
hideLabels: true,
hideClearButton: true,
@@ -60,7 +54,7 @@ function constructBomUploadTable(data, options={}) {
return `Cannot render field '${field_name}`;
}
field.value = row[field_name];
field.value = row.data[field_name];
return constructField(`items_${field_name}_${idx}`, field, field_options);
@@ -99,19 +93,19 @@ function constructBomUploadTable(data, options={}) {
$('#bom-import-table tbody').append(html);
// Handle any errors raised by initial data import
if (errors.part) {
addFieldErrorMessage(`items_sub_part_${idx}`, errors.part);
if (row.data.errors.part) {
addFieldErrorMessage(`items_sub_part_${idx}`, row.data.errors.part);
}
if (errors.quantity) {
addFieldErrorMessage(`items_quantity_${idx}`, errors.quantity);
if (row.data.errors.quantity) {
addFieldErrorMessage(`items_quantity_${idx}`, row.data.errors.quantity);
}
// Initialize the "part" selector for this row
initializeRelatedField(
{
name: `items_sub_part_${idx}`,
value: row.part,
value: row.data.part,
api_url: '{% url "api-part-list" %}',
filters: {
component: true,
@@ -140,7 +134,12 @@ function constructBomUploadTable(data, options={}) {
});
// Prettify the original import data
var pretty = JSON.stringify(row, undefined, 4);
var pretty = JSON.stringify(
{
columns: data.columns,
row: row.original,
}, undefined, 4
);
var html = `
<div class='alert alert-block'>
@@ -176,7 +175,7 @@ function submitBomTable(part_id, options={}) {
var idx_values = [];
var url = '{% url "api-bom-upload" %}';
var url = '{% url "api-bom-import-submit" %}';
$('.bom-import-row').each(function() {
var idx = $(this).attr('idx');

View File

@@ -1219,7 +1219,7 @@ function addFieldErrorMessage(name, error_text, error_idx=0, options={}) {
field_dom.append(error_html);
} else {
console.log(`WARNING: addFieldErrorMessage could not locate field '${field_name}`);
console.log(`WARNING: addFieldErrorMessage could not locate field '${field_name}'`);
}
}