"""
Functionality for Bill of Material (BOM) management.
Primarily BOM upload tools.
"""
2020-08-20 18:53:27 +00:00
|
|
|
from collections import OrderedDict
|
|
|
|
|
2021-07-10 04:13:46 +00:00
|
|
|
from django.utils.translation import gettext as _
|
2019-06-27 12:46:11 +00:00
|
|
|
|
2021-11-10 02:27:59 +00:00
|
|
|
from InvenTree.helpers import DownloadFile, GetExportFormats, normalize
|
2019-06-27 12:46:11 +00:00
|
|
|
|
2019-09-15 12:21:12 +00:00
|
|
|
from .admin import BomItemResource
|
|
|
|
from .models import BomItem
|
2021-04-07 15:43:05 +00:00
|
|
|
from company.models import ManufacturerPart, SupplierPart
|
2019-09-15 12:21:12 +00:00
|
|
|
|
2019-06-27 12:46:11 +00:00
|
|
|
|
|
|
|
def IsValidBOMFormat(fmt):
    """ Check whether the given format specifier is a supported BOM export format. """

    normalized = fmt.strip().lower()

    return normalized in GetExportFormats()
|
2019-06-27 12:46:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def MakeBomTemplate(fmt):
    """ Generate a Bill of Materials upload template file (for user download).

    Falls back to 'csv' if the requested format is not supported.
    """

    fmt = fmt.strip().lower()

    if not IsValidBOMFormat(fmt):
        fmt = 'csv'

    # Deliberately-empty queryset: exporting it produces only the column headers,
    # which is exactly what an upload template needs
    empty_query = BomItem.objects.filter(pk=None)

    dataset = BomItemResource().export(queryset=empty_query, importing=True)

    data = dataset.export(fmt)

    return DownloadFile(data, 'InvenTree_BOM_Template.' + fmt)
|
|
|
|
|
|
|
|
|
2021-04-07 15:43:05 +00:00
|
|
|
def ExportBom(part, fmt='csv', cascade=False, max_levels=None, parameter_data=False, stock_data=False, supplier_data=False, manufacturer_data=False):
    """ Export a BOM (Bill of Materials) for a given part.

    Args:
        part: Part instance for which the BOM will be exported
        fmt: File format (default = 'csv'). Falls back to 'csv' if invalid.
        cascade: If True, multi-level BOM output is supported. Otherwise, a flat top-level-only BOM is exported.
        max_levels: Maximum BOM depth to export when cascading (None = unlimited)
        parameter_data: If True, add a column for each PartParameter of each line item
        stock_data: If True, add stock-related columns for each line item
        supplier_data: If True, add columns for each SupplierPart of each line item
        manufacturer_data: If True, add columns for each ManufacturerPart of each line item

    Returns:
        A downloadable file response (via DownloadFile) containing the exported BOM
    """

    if not IsValidBOMFormat(fmt):
        fmt = 'csv'

    bom_items = []

    # Primary keys of BomItem objects already visited.
    # Previously this list was never populated, so the circular-reference
    # guard below was dead code and a circular BOM caused infinite recursion.
    uids = set()

    def add_items(items, level, cascade=True):
        # Add items at a given layer of the BOM tree
        for item in items:

            item.level = str(int(level))

            # Avoid circular BOM references
            if item.pk in uids:
                continue

            uids.add(item.pk)

            bom_items.append(item)

            # Recurse into sub-assemblies (depth-limited by max_levels)
            if cascade and item.sub_part.assembly:
                if max_levels is None or level < max_levels:
                    add_items(item.sub_part.bom_items.all().order_by('id'), level + 1)

    top_level_items = part.get_bom_items().order_by('id')

    add_items(top_level_items, 1, cascade)

    dataset = BomItemResource().export(queryset=bom_items, cascade=cascade)

    def add_columns_to_dataset(columns, column_size):
        # Append each column (a {row_index: value} dict) to the dataset,
        # padding missing rows with empty strings
        try:
            for header, column_dict in columns.items():
                # Construct column tuple
                col = tuple(column_dict.get(c_idx, '') for c_idx in range(column_size))
                # Add column to dataset
                dataset.append_col(col, header=header)
        except AttributeError:
            pass

    if parameter_data:
        # If requested, add extra columns for each PartParameter associated with each line item

        parameter_cols = {}

        for b_idx, bom_item in enumerate(bom_items):
            # Get part parameters
            parameters = bom_item.sub_part.get_parameters()

            # Add parameters to columns
            if parameters:
                for parameter in parameters:
                    name = parameter.template.name
                    value = parameter.data

                    try:
                        parameter_cols[name].update({b_idx: value})
                    except KeyError:
                        parameter_cols[name] = {b_idx: value}

        # Add parameter columns to dataset, sorted alphabetically by parameter name
        parameter_cols_ordered = OrderedDict(sorted(parameter_cols.items(), key=lambda x: x[0]))
        add_columns_to_dataset(parameter_cols_ordered, len(bom_items))

    if stock_data:
        # If requested, add extra columns for stock data associated with each line item

        stock_headers = [
            _('Default Location'),
            _('Total Stock'),
            _('Available Stock'),
            _('On Order'),
        ]

        stock_cols = {}

        for b_idx, bom_item in enumerate(bom_items):

            # Per-row stock values, in the same order as stock_headers.
            # NOTE: renamed from 'stock_data' to avoid shadowing the function argument.
            stock_values = []

            sub_part = bom_item.sub_part

            # Get part default location
            try:
                loc = sub_part.get_default_location()

                if loc is not None:
                    stock_values.append(str(loc.name))
                else:
                    stock_values.append('')
            except AttributeError:
                stock_values.append('')

            # Total "in stock" quantity for this part
            stock_values.append(
                str(normalize(sub_part.total_stock))
            )

            # Total "available stock" quantity for this part
            stock_values.append(
                str(normalize(sub_part.available_stock))
            )

            # Total "on order" quantity for this part
            stock_values.append(
                str(normalize(sub_part.on_order))
            )

            for s_idx, header in enumerate(stock_headers):
                try:
                    stock_cols[header].update({b_idx: stock_values[s_idx]})
                except KeyError:
                    stock_cols[header] = {b_idx: stock_values[s_idx]}

        # Add stock columns to dataset
        add_columns_to_dataset(stock_cols, len(bom_items))

    if manufacturer_data or supplier_data:
        # If requested, add extra columns for each SupplierPart and ManufacturerPart
        # associated with each line item

        # Keep track of the supplier parts we have already exported
        supplier_parts_used = set()

        manufacturer_cols = {}

        for bom_idx, bom_item in enumerate(bom_items):
            # Get part instance
            b_part = bom_item.sub_part

            # Include manufacturer data for each BOM item
            if manufacturer_data:

                # Filter manufacturer parts
                manufacturer_parts = ManufacturerPart.objects.filter(part__pk=b_part.pk).prefetch_related('supplier_parts')

                for mp_idx, mp_part in enumerate(manufacturer_parts):

                    # Extract the "name" field of the Manufacturer (Company)
                    if mp_part and mp_part.manufacturer:
                        manufacturer_name = mp_part.manufacturer.name
                    else:
                        manufacturer_name = ''

                    # Extract the "MPN" field from the Manufacturer Part
                    if mp_part:
                        manufacturer_mpn = mp_part.MPN
                    else:
                        manufacturer_mpn = ''

                    # Generate a column name for this manufacturer
                    k_man = f'{_("Manufacturer")}_{mp_idx}'
                    k_mpn = f'{_("MPN")}_{mp_idx}'

                    try:
                        manufacturer_cols[k_man].update({bom_idx: manufacturer_name})
                        manufacturer_cols[k_mpn].update({bom_idx: manufacturer_mpn})
                    except KeyError:
                        manufacturer_cols[k_man] = {bom_idx: manufacturer_name}
                        manufacturer_cols[k_mpn] = {bom_idx: manufacturer_mpn}

                    # We wish to include supplier data for this manufacturer part
                    if supplier_data:

                        for sp_idx, sp_part in enumerate(mp_part.supplier_parts.all()):

                            supplier_parts_used.add(sp_part)

                            if sp_part.supplier:
                                supplier_name = sp_part.supplier.name
                            else:
                                supplier_name = ''

                            if sp_part:
                                supplier_sku = sp_part.SKU
                            else:
                                supplier_sku = ''

                            # Generate column names for this supplier
                            k_sup = str(_("Supplier")) + "_" + str(mp_idx) + "_" + str(sp_idx)
                            k_sku = str(_("SKU")) + "_" + str(mp_idx) + "_" + str(sp_idx)

                            try:
                                manufacturer_cols[k_sup].update({bom_idx: supplier_name})
                                manufacturer_cols[k_sku].update({bom_idx: supplier_sku})
                            except KeyError:
                                manufacturer_cols[k_sup] = {bom_idx: supplier_name}
                                manufacturer_cols[k_sku] = {bom_idx: supplier_sku}

            if supplier_data:
                # Add in any extra supplier parts, which are not associated with a manufacturer part

                for sp_idx, sp_part in enumerate(SupplierPart.objects.filter(part__pk=b_part.pk)):

                    if sp_part in supplier_parts_used:
                        continue

                    supplier_parts_used.add(sp_part)

                    if sp_part.supplier:
                        supplier_name = sp_part.supplier.name
                    else:
                        supplier_name = ''

                    supplier_sku = sp_part.SKU

                    # Generate column names for this supplier
                    k_sup = str(_("Supplier")) + "_" + str(sp_idx)
                    k_sku = str(_("SKU")) + "_" + str(sp_idx)

                    try:
                        manufacturer_cols[k_sup].update({bom_idx: supplier_name})
                        manufacturer_cols[k_sku].update({bom_idx: supplier_sku})
                    except KeyError:
                        manufacturer_cols[k_sup] = {bom_idx: supplier_name}
                        manufacturer_cols[k_sku] = {bom_idx: supplier_sku}

        # Add supplier columns to dataset
        add_columns_to_dataset(manufacturer_cols, len(bom_items))

    data = dataset.export(fmt)

    filename = f"{part.full_name}_BOM.{fmt}"

    return DownloadFile(data, filename)
|