Merge branch 'inventree:master' into ci-only-in-inventree

Commit: 6d8a0e920f
@@ -7,7 +7,7 @@ services:
     expose:
       - 5432/tcp
     volumes:
-      - ../dev:/var/lib/postgresql/data:z
+      - inventreedatabase:/var/lib/postgresql/data:z
     environment:
       POSTGRES_DB: inventree
       POSTGRES_USER: inventree_user
@@ -19,7 +19,6 @@ services:
       target: dev
       args:
         base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"
-        workspace: "${containerWorkspaceFolder}"
        data_dir: "dev"
     volumes:
       - ../:/home/inventree:z
@@ -36,3 +35,6 @@ services:
 
     depends_on:
       - db
+
+volumes:
+  inventreedatabase:
@@ -7,9 +7,13 @@ git config --global --add safe.directory /home/inventree
 python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
 . /home/inventree/dev/venv/bin/activate
 
-# setup InvenTree server
+# Run initial InvenTree server setup
 invoke update -s
 
+# Configure dev environment
 invoke setup-dev
 
+# Install required frontend packages
 invoke frontend-install
 
 # remove existing gitconfig created by "Avoiding Dubious Ownership" step
.github/FUNDING.yml (1 change, vendored)

@@ -1,4 +1,5 @@
 github: inventree
 ko_fi: inventree
 patreon: inventree
+polar: inventree
 custom: [paypal.me/inventree]
.github/workflows/docker.yaml (32 changes, vendored)

@@ -58,15 +58,7 @@ jobs:
     env:
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       python_version: "3.11"
-    strategy:
-      matrix:
-        platform: ["linux/amd64", "linux/arm64"]
-        include:
-          - platform: linux/amd64
-            os: ubuntu-latest
-          - platform: linux/arm64
-            os: ubuntu-latest # in the future we can try to use alternative runners here
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest # in the future we can try to use alternative runners here
 
     steps:
       - name: Check out repo
@@ -82,6 +74,14 @@ jobs:
           python3 ci/version_check.py
           echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
           echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
+      - name: Test Docker Image
+        id: test-docker
+        run: |
+          docker build . --target production --tag inventree-test
+          docker run --rm inventree-test invoke --version
+          docker run --rm inventree-test invoke --list
+          docker run --rm inventree-test gunicorn --version
+          docker run --rm inventree-test pg_dump --version
       - name: Build Docker Image
         # Build the development docker image (using docker-compose.yml)
         run: docker-compose build --no-cache
@@ -147,24 +147,18 @@ jobs:
             inventree/inventree
             ghcr.io/${{ github.repository }}
 
-      - name: Build and Push
-        id: build-and-push
+      - name: Push Docker Images
+        id: push-docker
         if: github.event_name != 'pull_request'
         uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # pin@v5.0.0
         with:
           context: .
-          platforms: ${{ matrix.platform }}
+          platforms: linux/amd64,linux/arm64
           push: true
           sbom: true
           provenance: false
           target: production
-          tags: ${{ steps.meta.outputs.tags }}
+          tags: ${{ env.docker_tags }}
           build-args: |
             commit_hash=${{ env.git_commit_hash }}
             commit_date=${{ env.git_commit_date }}
-
-      - name: Sign the published image
-        if: ${{ false }} # github.event_name != 'pull_request'
-        env:
-          COSIGN_EXPERIMENTAL: "true"
-        run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}
.github/workflows/qc_checks.yaml (4 changes, vendored)

@@ -144,7 +144,7 @@ jobs:
           dev-install: true
           update: true
       - name: Export API Documentation
-        run: invoke schema --ignore-warnings
+        run: invoke schema --ignore-warnings --filename InvenTree/schema.yml
       - name: Upload schema
         uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
         with:
@@ -160,7 +160,7 @@ jobs:
           echo "URL: $url"
           curl -s -o api.yaml $url
           echo "Downloaded api.yaml"
-      - name: Check for differences in schemas
+      - name: Check for differences in API Schema
         if: needs.paths-filter.outputs.api == 'false'
         run: |
           diff --color -u InvenTree/schema.yml api.yaml
.gitignore (4 changes, vendored)

@@ -104,3 +104,7 @@ api.yaml
 
 # web frontend (static files)
 InvenTree/web/static
+
+# Generated docs files
+docs/docs/api/*.yml
+docs/docs/api/schema/*.yml
@@ -19,9 +19,9 @@ before:
   - contrib/packager.io/before.sh
 dependencies:
   - curl
-  - python3.9
-  - python3.9-venv
-  - python3.9-dev
+  - "python3.9 | python3.10 | python3.11"
+  - "python3.9-venv | python3.10-venv | python3.11-venv"
+  - "python3.9-dev | python3.10-dev | python3.11-dev"
   - python3-pip
   - python3-cffi
   - python3-brotli
@@ -36,4 +36,3 @@ dependencies:
 targets:
   ubuntu-20.04: true
   debian-11: true
-  debian-12: true
@@ -16,7 +16,7 @@ repos:
       - id: check-yaml
       - id: mixed-line-ending
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.2.2
+    rev: v0.3.0
     hooks:
       - id: ruff-format
         args: [--preview]
@@ -26,7 +26,7 @@ repos:
           --preview
         ]
   - repo: https://github.com/matmair/ruff-pre-commit
-    rev: 830893bf46db844d9c99b6c468e285199adf2de6 # uv-018
+    rev: 8bed1087452bdf816b840ea7b6848b21d32b7419 # uv-018
     hooks:
       - id: pip-compile
         name: pip-compile requirements-dev.in
@@ -60,7 +60,7 @@ repos:
       - "prettier@^2.4.1"
       - "@trivago/prettier-plugin-sort-imports"
   - repo: https://github.com/pre-commit/mirrors-eslint
-    rev: "v9.0.0-beta.0"
+    rev: "v9.0.0-beta.1"
     hooks:
       - id: eslint
        additional_dependencies:
Dockerfile (11 changes)

@@ -48,8 +48,6 @@ ENV INVENTREE_BACKGROUND_WORKERS="4"
 ENV INVENTREE_WEB_ADDR=0.0.0.0
 ENV INVENTREE_WEB_PORT=8000
 
-ENV VIRTUAL_ENV=/usr/local
-
 LABEL org.label-schema.schema-version="1.0" \
       org.label-schema.build-date=${DATE} \
       org.label-schema.vendor="inventree" \
@@ -65,8 +63,11 @@ RUN apk add --no-cache \
     libjpeg libwebp zlib \
     # Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
     py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
-    # Core database packages
-    postgresql13-client && \
+    # Postgres client
+    postgresql13-client \
+    # MySQL / MariaDB client
+    mariadb-client mariadb-connector-c \
+    && \
     # fonts
     apk --update --upgrade --no-cache add fontconfig ttf-freefont font-noto terminus-font && fc-cache -f
@@ -96,7 +97,7 @@ FROM inventree_base as prebuild
 
 ENV PATH=/root/.local/bin:$PATH
 RUN ./install_build_packages.sh --no-cache --virtual .build-deps && \
-    pip install --user uv --no-cache-dir && pip install -r base_requirements.txt -r requirements.txt --no-cache && \
+    pip install --user -r base_requirements.txt -r requirements.txt --no-cache && \
     apk --purge del .build-deps
 
 # Frontend builder image:
@@ -1,11 +1,40 @@
 """InvenTree API version information."""
 
 # InvenTree API version
-INVENTREE_API_VERSION = 177
+INVENTREE_API_VERSION = 183
 """Increment this API version number whenever there is a significant change to the API that any clients need to know about."""
 
 INVENTREE_API_TEXT = """
+
+v183 - 2024-03-14 : https://github.com/inventree/InvenTree/pull/5972
+    - Adds "category_default_location" annotated field to part serializer
+    - Adds "part_detail.category_default_location" annotated field to stock item serializer
+    - Adds "part_detail.category_default_location" annotated field to purchase order line serializer
+    - Adds "parent_default_location" annotated field to category serializer
+
+v182 - 2024-03-13 : https://github.com/inventree/InvenTree/pull/6714
+    - Expose ReportSnippet model to the /report/snippet/ API endpoint
+    - Expose ReportAsset model to the /report/asset/ API endpoint
+
+v181 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6541
+    - Adds "width" and "height" fields to the LabelTemplate API endpoint
+    - Adds "page_size" and "landscape" fields to the ReportTemplate API endpoint
+
+v180 - 2024-3-02 : https://github.com/inventree/InvenTree/pull/6463
+    - Tweaks to API documentation to allow automatic documentation generation
+
+v179 - 2024-03-01 : https://github.com/inventree/InvenTree/pull/6605
+    - Adds "subcategories" count to PartCategory serializer
+    - Adds "sublocations" count to StockLocation serializer
+    - Adds "image" field to PartBrief serializer
+    - Adds "image" field to CompanyBrief serializer
+
+v178 - 2024-02-29 : https://github.com/inventree/InvenTree/pull/6604
+    - Adds "external_stock" field to the Part API endpoint
+    - Adds "external_stock" field to the BomItem API endpoint
+    - Adds "external_stock" field to the BuildLine API endpoint
+    - Stock quantites represented in the BuildLine API endpoint are now filtered by Build.source_location
+
 v177 - 2024-02-27 : https://github.com/inventree/InvenTree/pull/6581
     - Adds "subcategoies" count to PartCategoryTree serializer
     - Adds "sublocations" count to StockLocationTree serializer
@@ -39,9 +39,9 @@ def reload_unit_registry():
     reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
 
     # Aliases for temperature units
-    reg.define('@alias degC = celsius = Celsius')
-    reg.define('@alias degF = fahrenheit = Fahrenheit')
-    reg.define('@alias degK = kelvin = Kelvin')
+    reg.define('@alias degC = Celsius')
+    reg.define('@alias degF = Fahrenheit')
+    reg.define('@alias degK = Kelvin')
 
     # Define some "standard" additional units
     reg.define('piece = 1')
@@ -165,6 +165,13 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
     value = str(value).strip() if value else ''
     unit = str(unit).strip() if unit else ''
 
+    # Handle imperial length measurements
+    if value.count("'") == 1 and value.endswith("'"):
+        value = value.replace("'", ' feet')
+
+    if value.count('"') == 1 and value.endswith('"'):
+        value = value.replace('"', ' inches')
+
     # Error on blank values
     if not value:
         raise ValidationError(_('No value provided'))
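For reference, the new quote-suffix handling can be reproduced outside InvenTree with pint alone. This is a minimal standalone sketch of the same normalisation (the function name and values are illustrative, not InvenTree code):

```python
# Minimal sketch of the quote-suffix normalisation, using pint directly.
import pint

ureg = pint.UnitRegistry()


def normalise_imperial(value: str) -> str:
    """Rewrite a single trailing ' or " suffix into an explicit unit name."""
    value = value.strip()
    if value.count("'") == 1 and value.endswith("'"):
        value = value.replace("'", ' feet')
    if value.count('"') == 1 and value.endswith('"'):
        value = value.replace('"', ' inches')
    return value


print(ureg.Quantity(normalise_imperial("3'")).to('inch'))  # -> 36 inch
print(ureg.Quantity(normalise_imperial('1 "')).to('mm'))   # -> 25.4 millimeter
```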
@@ -1,5 +1,6 @@
 """Provides helper functions used throughout the InvenTree project."""
 
+import datetime
 import hashlib
 import io
 import json
@@ -11,6 +12,7 @@ from decimal import Decimal, InvalidOperation
 from typing import TypeVar
 from wsgiref.util import FileWrapper
 
+import django.utils.timezone as timezone
 from django.conf import settings
 from django.contrib.staticfiles.storage import StaticFilesStorage
 from django.core.exceptions import FieldError, ValidationError
@@ -18,6 +20,7 @@ from django.core.files.storage import default_storage
 from django.http import StreamingHttpResponse
 from django.utils.translation import gettext_lazy as _
 
+import pytz
 import regex
 from bleach import clean
 from djmoney.money import Money
@@ -87,11 +90,24 @@ def generateTestKey(test_name: str) -> str:
     key = test_name.strip().lower()
     key = key.replace(' ', '')
 
-    # Remove any characters that cannot be used to represent a variable
-    key = re.sub(r'[^a-zA-Z0-9_]', '', key)
-
-    # If the key starts with a digit, prefix with an underscore
-    if key[0].isdigit():
+    def valid_char(char: str):
+        """Determine if a particular character is valid for use in a test key."""
+        if not char.isprintable():
+            return False
+
+        if char.isidentifier():
+            return True
+
+        if char.isalnum():
+            return True
+
+        return False
+
+    # Remove any characters that cannot be used to represent a variable
+    key = ''.join([c for c in key if valid_char(c)])
+
+    # If the key starts with a non-identifier character, prefix with an underscore
+    if len(key) > 0 and not key[0].isidentifier():
         key = '_' + key
 
     return key
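The behaviour of the reworked key filter can be illustrated with a standalone copy of the same logic. This sketch mirrors the diff above (it is not the InvenTree helper itself, and the example inputs are made up):

```python
# Standalone sketch: keep printable characters that are identifier characters
# or alphanumeric, and prefix '_' when the key does not start with an
# identifier character.
def generate_test_key(test_name: str) -> str:
    key = test_name.strip().lower().replace(' ', '')

    def valid_char(char: str) -> bool:
        return char.isprintable() and (char.isidentifier() or char.isalnum())

    key = ''.join(c for c in key if valid_char(c))

    if len(key) > 0 and not key[0].isidentifier():
        key = '_' + key

    return key


print(generate_test_key('100 Watt Test'))  # '_100watttest'
print(generate_test_key('Ünicode Name'))   # 'ünicodename' (kept, unlike the old regex)
```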
@@ -850,6 +866,56 @@ def hash_file(filename: str):
     return hashlib.md5(open(filename, 'rb').read()).hexdigest()
 
 
+def server_timezone() -> str:
+    """Return the timezone of the server as a string.
+
+    e.g. "UTC" / "Australia/Sydney" etc
+    """
+    return settings.TIME_ZONE
+
+
+def to_local_time(time, target_tz: str = None):
+    """Convert the provided time object to the local timezone.
+
+    Arguments:
+        time: The time / date to convert
+        target_tz: The desired timezone (string) - defaults to server time
+
+    Returns:
+        A timezone aware datetime object, with the desired timezone
+
+    Raises:
+        TypeError: If the provided time object is not a datetime or date object
+    """
+    if isinstance(time, datetime.datetime):
+        pass
+    elif isinstance(time, datetime.date):
+        time = timezone.datetime(year=time.year, month=time.month, day=time.day)
+    else:
+        raise TypeError(
+            f'Argument must be a datetime or date object (found {type(time)}'
+        )
+
+    # Extract timezone information from the provided time
+    source_tz = getattr(time, 'tzinfo', None)
+
+    if not source_tz:
+        # Default to UTC if not provided
+        source_tz = pytz.utc
+
+    if not target_tz:
+        target_tz = server_timezone()
+
+    try:
+        target_tz = pytz.timezone(str(target_tz))
+    except pytz.UnknownTimeZoneError:
+        target_tz = pytz.utc
+
+    target_time = time.replace(tzinfo=source_tz).astimezone(target_tz)
+
+    return target_time
+
+
 def get_objectreference(
     obj, type_ref: str = 'content_type', object_ref: str = 'object_id'
 ):
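A hypothetical usage sketch for the two new helpers (assumes a configured Django/InvenTree environment where InvenTree.helpers is importable; the example date is arbitrary):

```python
import datetime

import pytz

from InvenTree.helpers import server_timezone, to_local_time

print(server_timezone())  # e.g. "UTC", taken from the TIME_ZONE setting

src = datetime.datetime(2024, 3, 1, 12, 0, 0, tzinfo=pytz.utc)

# Convert to an explicit timezone (Sydney is UTC+11 in March)
print(to_local_time(src, 'Australia/Sydney'))  # 2024-03-01 23:00:00+11:00

# With no target timezone, the server timezone is used
print(to_local_time(src))
```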
@@ -74,6 +74,7 @@ class AuthRequiredMiddleware(object):
 
         # Is the function exempt from auth requirements?
         path_func = resolve(request.path).func
+
         if getattr(path_func, 'auth_exempt', False) is True:
             return self.get_response(request)
 
@@ -119,7 +120,13 @@ class AuthRequiredMiddleware(object):
             ]
 
             # Do not redirect requests to any of these paths
-            paths_ignore = ['/api/', '/js/', '/media/', '/static/']
+            paths_ignore = [
+                '/api/',
+                '/auth/',
+                '/js/',
+                settings.MEDIA_URL,
+                settings.STATIC_URL,
+            ]
 
             if path not in urls and not any(
                 path.startswith(p) for p in paths_ignore
@@ -22,6 +22,7 @@ from django.http import Http404
 from django.utils.translation import gettext_lazy as _
 
 import moneyed
+import pytz
 from dotenv import load_dotenv
 
 from InvenTree.config import get_boolean_setting, get_custom_file, get_setting
@@ -130,6 +131,9 @@ DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000
 # Web URL endpoint for served static files
 STATIC_URL = '/static/'
 
+# Web URL endpoint for served media files
+MEDIA_URL = '/media/'
+
 STATICFILES_DIRS = []
 
 # Translated Template settings
@@ -155,9 +159,6 @@ STATFILES_I18_PROCESSORS = ['InvenTree.context.status_codes']
 # Color Themes Directory
 STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve()
 
-# Web URL endpoint for served media files
-MEDIA_URL = '/media/'
-
 # Database backup options
 # Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html
 DBBACKUP_SEND_EMAIL = False
@@ -205,6 +206,7 @@ INSTALLED_APPS = [
     'django.contrib.auth',
     'django.contrib.contenttypes',
     'user_sessions',  # db user sessions
+    'whitenoise.runserver_nostatic',
     'django.contrib.messages',
     'django.contrib.staticfiles',
     'django.contrib.sites',
@@ -249,6 +251,7 @@ MIDDLEWARE = CONFIG.get(
         'django.middleware.locale.LocaleMiddleware',
         'django.middleware.csrf.CsrfViewMiddleware',
         'corsheaders.middleware.CorsMiddleware',
+        'whitenoise.middleware.WhiteNoiseMiddleware',
         'django.middleware.common.CommonMiddleware',
         'django.contrib.auth.middleware.AuthenticationMiddleware',
         'InvenTree.middleware.InvenTreeRemoteUserMiddleware',  # Remote / proxy auth
@@ -294,7 +297,10 @@ if LDAP_AUTH:
 
     # get global options from dict and use ldap.OPT_* as keys and values
     global_options_dict = get_setting(
-        'INVENTREE_LDAP_GLOBAL_OPTIONS', 'ldap.global_options', {}, dict
+        'INVENTREE_LDAP_GLOBAL_OPTIONS',
+        'ldap.global_options',
+        default_value=None,
+        typecast=dict,
     )
     global_options = {}
     for k, v in global_options_dict.items():
@@ -364,24 +370,16 @@ if LDAP_AUTH:
     )
     AUTH_LDAP_DENY_GROUP = get_setting('INVENTREE_LDAP_DENY_GROUP', 'ldap.deny_group')
     AUTH_LDAP_USER_FLAGS_BY_GROUP = get_setting(
-        'INVENTREE_LDAP_USER_FLAGS_BY_GROUP', 'ldap.user_flags_by_group', {}, dict
+        'INVENTREE_LDAP_USER_FLAGS_BY_GROUP',
+        'ldap.user_flags_by_group',
+        default_value=None,
+        typecast=dict,
     )
     AUTH_LDAP_FIND_GROUP_PERMS = True
 
-# Internal IP addresses allowed to see the debug toolbar
-INTERNAL_IPS = ['127.0.0.1']
-
 # Internal flag to determine if we are running in docker mode
 DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)
 
-if DOCKER:  # pragma: no cover
-    # Internal IP addresses are different when running under docker
-    hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname())
-    INTERNAL_IPS = [ip[: ip.rfind('.')] + '.1' for ip in ips] + [
-        '127.0.0.1',
-        '10.0.2.2',
-    ]
-
 # Allow secure http developer server in debug mode
 if DEBUG:
     INSTALLED_APPS.append('sslserver')
@@ -469,21 +467,6 @@ if USE_JWT:
     INSTALLED_APPS.append('rest_framework_simplejwt')
 
 # WSGI default setting
-SPECTACULAR_SETTINGS = {
-    'TITLE': 'InvenTree API',
-    'DESCRIPTION': 'API for InvenTree - the intuitive open source inventory management system',
-    'LICENSE': {
-        'name': 'MIT',
-        'url': 'https://github.com/inventree/InvenTree/blob/master/LICENSE',
-    },
-    'EXTERNAL_DOCS': {
-        'description': 'More information about InvenTree in the official docs',
-        'url': 'https://docs.inventree.org',
-    },
-    'VERSION': str(inventreeApiVersion()),
-    'SERVE_INCLUDE_SCHEMA': False,
-}
-
 WSGI_APPLICATION = 'InvenTree.wsgi.application'
 
 """
@@ -497,7 +480,7 @@ Configure the database backend based on the user-specified values.
 logger.debug('Configuring database backend:')
 
 # Extract database configuration from the config.yaml file
-db_config = CONFIG.get('database', {})
+db_config = CONFIG.get('database', None)
 
 if not db_config:
     db_config = {}
@@ -573,7 +556,10 @@ Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
 # connecting to the database server (such as a replica failover) don't sit and
 # wait for possibly an hour or more, just tell the client something went wrong
 # and let the client retry when they want to.
-db_options = db_config.get('OPTIONS', db_config.get('options', {}))
+db_options = db_config.get('OPTIONS', db_config.get('options', None))
+
+if db_options is None:
+    db_options = {}
 
 # Specific options for postgres backend
 if 'postgres' in db_engine:  # pragma: no cover
@@ -736,7 +722,10 @@ if TRACING_ENABLED:  # pragma: no cover
     logger.info('OpenTelemetry tracing enabled')
 
     _t_resources = get_setting(
-        'INVENTREE_TRACING_RESOURCES', 'tracing.resources', {}, dict
+        'INVENTREE_TRACING_RESOURCES',
+        'tracing.resources',
+        default_value=None,
+        typecast=dict,
     )
     cstm_tags = {'inventree.env.' + k: v for k, v in inventree_tags.items()}
     tracing_resources = {**cstm_tags, **_t_resources}
@@ -748,7 +737,12 @@ if TRACING_ENABLED:  # pragma: no cover
         console=get_boolean_setting(
             'INVENTREE_TRACING_CONSOLE', 'tracing.console', False
         ),
-        auth=get_setting('INVENTREE_TRACING_AUTH', 'tracing.auth', {}),
+        auth=get_setting(
+            'INVENTREE_TRACING_AUTH',
+            'tracing.auth',
+            default_value=None,
+            typecast=dict,
+        ),
         is_http=get_setting('INVENTREE_TRACING_IS_HTTP', 'tracing.is_http', True),
         append_http=get_boolean_setting(
             'INVENTREE_TRACING_APPEND_HTTP', 'tracing.append_http', True
@@ -945,8 +939,13 @@ LOCALE_PATHS = (BASE_DIR.joinpath('locale/'),)
 
 TIME_ZONE = get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC')
 
+# Check that the timezone is valid
+try:
+    pytz.timezone(TIME_ZONE)
+except pytz.exceptions.UnknownTimeZoneError:  # pragma: no cover
+    raise ValueError(f"Specified timezone '{TIME_ZONE}' is not valid")
+
 USE_I18N = True
 
 # Do not use native timezone support in "test" mode
 # It generates a *lot* of cruft in the logs
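The validation step itself is easy to try in isolation. A standalone sketch of the same check (the invalid name at the end is made up):

```python
import pytz


def check_timezone(name: str) -> None:
    """Raise ValueError if the provided timezone name is not recognised."""
    try:
        pytz.timezone(name)
    except pytz.exceptions.UnknownTimeZoneError:
        raise ValueError(f"Specified timezone '{name}' is not valid")


check_timezone('UTC')                 # passes silently
check_timezone('Australia/Sydney')    # passes silently
# check_timezone('Mars/Olympus_Mons') # would raise ValueError
```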
@@ -990,13 +989,29 @@ if not SITE_MULTI:
 ALLOWED_HOSTS = get_setting(
     'INVENTREE_ALLOWED_HOSTS',
     config_key='allowed_hosts',
-    default_value=['*'],
+    default_value=[],
     typecast=list,
 )
 
 if SITE_URL and SITE_URL not in ALLOWED_HOSTS:
     ALLOWED_HOSTS.append(SITE_URL)
 
+if not ALLOWED_HOSTS:
+    if DEBUG:
+        logger.info(
+            'No ALLOWED_HOSTS specified. Defaulting to ["*"] for debug mode. This is not recommended for production use'
+        )
+        ALLOWED_HOSTS = ['*']
+    else:
+        logger.error(
+            'No ALLOWED_HOSTS specified. Please provide a list of allowed hosts, or specify INVENTREE_SITE_URL'
+        )
+
+# Ensure that the ALLOWED_HOSTS do not contain any scheme info
+for i, host in enumerate(ALLOWED_HOSTS):
+    if '://' in host:
+        ALLOWED_HOSTS[i] = host.split('://')[1]
+
 # List of trusted origins for unsafe requests
 # Ref: https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
 CSRF_TRUSTED_ORIGINS = get_setting(
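The scheme-stripping loop added above behaves as follows; a standalone sketch with made-up host values:

```python
# Hosts configured with a scheme are reduced to their bare hostname, since
# Django's ALLOWED_HOSTS entries must not include a scheme.
allowed_hosts = ['https://inventree.example.com', 'localhost', '127.0.0.1']

for i, host in enumerate(allowed_hosts):
    if '://' in host:
        allowed_hosts[i] = host.split('://')[1]

print(allowed_hosts)  # ['inventree.example.com', 'localhost', '127.0.0.1']
```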
@@ -1037,8 +1052,8 @@ CORS_ALLOW_CREDENTIALS = get_boolean_setting(
     default_value=True,
 )
 
-# Only allow CORS access to API and media endpoints
-CORS_URLS_REGEX = r'^/(api|media|static)/.*$'
+# Only allow CORS access to the following URL endpoints
+CORS_URLS_REGEX = r'^/(api|auth|media|static)/.*$'
 
 CORS_ALLOWED_ORIGINS = get_setting(
     'INVENTREE_CORS_ORIGIN_WHITELIST',
@@ -1051,6 +1066,27 @@ CORS_ALLOWED_ORIGINS = get_setting(
 if SITE_URL and SITE_URL not in CORS_ALLOWED_ORIGINS:
     CORS_ALLOWED_ORIGINS.append(SITE_URL)
 
+CORS_ALLOWED_ORIGIN_REGEXES = get_setting(
+    'INVENTREE_CORS_ORIGIN_REGEX',
+    config_key='cors.regex',
+    default_value=[],
+    typecast=list,
+)
+
+# In debug mode allow CORS requests from localhost
+# This allows connection from the frontend development server
+if DEBUG:
+    CORS_ALLOWED_ORIGIN_REGEXES.append(r'^http://localhost:\d+$')
+
+if CORS_ALLOW_ALL_ORIGINS:
+    logger.info('CORS: All origins allowed')
+else:
+    if CORS_ALLOWED_ORIGINS:
+        logger.info('CORS: Whitelisted origins: %s', CORS_ALLOWED_ORIGINS)
+
+    if CORS_ALLOWED_ORIGIN_REGEXES:
+        logger.info('CORS: Whitelisted origin regexes: %s', CORS_ALLOWED_ORIGIN_REGEXES)
+
 for app in SOCIAL_BACKENDS:
     # Ensure that the app starts with 'allauth.socialaccount.providers'
     social_prefix = 'allauth.socialaccount.providers.'
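As a rough illustration of how a regex-based origin whitelist behaves (plain `re` here, not the actual django-cors-headers matching code; the origins are made up):

```python
import re

# The debug-mode pattern appended above
patterns = [r'^http://localhost:\d+$']


def origin_matches(origin: str) -> bool:
    """Return True if the origin matches any whitelisted regex pattern."""
    return any(re.match(pattern, origin) for pattern in patterns)


print(origin_matches('http://localhost:5173'))    # True
print(origin_matches('http://evil.example.com'))  # False
```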
@@ -1083,6 +1119,7 @@ ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting(
 )
 ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True
 ACCOUNT_PREVENT_ENUMERATION = True
+ACCOUNT_EMAIL_SUBJECT_PREFIX = EMAIL_SUBJECT_PREFIX
 # 2FA
 REMOVE_SUCCESS_URL = 'settings'
 
@@ -1173,7 +1210,9 @@ CUSTOM_SPLASH = get_custom_file(
     'INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash'
 )
 
-CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {})
+CUSTOMIZE = get_setting(
+    'INVENTREE_CUSTOMIZE', 'customize', default_value=None, typecast=dict
+)
 
 # Load settings for the frontend interface
 FRONTEND_SETTINGS = config.get_frontend_settings(debug=DEBUG)
@@ -1208,3 +1247,23 @@ if CUSTOM_FLAGS:
 # Magic login django-sesame
 SESAME_MAX_AGE = 300
 LOGIN_REDIRECT_URL = '/api/auth/login-redirect/'
+
+# Configuratino for API schema generation
+SPECTACULAR_SETTINGS = {
+    'TITLE': 'InvenTree API',
+    'DESCRIPTION': 'API for InvenTree - the intuitive open source inventory management system',
+    'LICENSE': {
+        'name': 'MIT',
+        'url': 'https://github.com/inventree/InvenTree/blob/master/LICENSE',
+    },
+    'EXTERNAL_DOCS': {
+        'description': 'More information about InvenTree in the official docs',
+        'url': 'https://docs.inventree.org',
+    },
+    'VERSION': str(inventreeApiVersion()),
+    'SERVE_INCLUDE_SCHEMA': False,
+    'SCHEMA_PATH_PREFIX': '/api/',
+}
+
+if SITE_URL:
+    SPECTACULAR_SETTINGS['SERVERS'] = [{'url': SITE_URL}]
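A quick way to confirm the effective schema settings after this move, assuming the InvenTree settings module has been loaded (for example via a Django shell):

```python
from django.conf import settings

# 'SERVERS' is only present when SITE_URL is configured; .get() returns None otherwise
for key in ('TITLE', 'VERSION', 'SCHEMA_PATH_PREFIX', 'SERVERS'):
    print(key, '=', settings.SPECTACULAR_SETTINGS.get(key))
```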
@@ -180,6 +180,8 @@ def offload_task(
     Returns:
         bool: True if the task was offloaded (or ran), False otherwise
     """
+    from InvenTree.exceptions import log_error
+
     try:
         import importlib
 
@@ -213,6 +215,7 @@ def offload_task(
             return False
     except Exception as exc:
         raise_warning(f"WARNING: '{taskname}' not offloaded due to {str(exc)}")
+        log_error('InvenTree.offload_task')
         return False
     else:
         if callable(taskname):
@@ -233,6 +236,7 @@ def offload_task(
             try:
                 _mod = importlib.import_module(app_mod)
             except ModuleNotFoundError:
+                log_error('InvenTree.offload_task')
                 raise_warning(
                     f"WARNING: '{taskname}' not started - No module named '{app_mod}'"
                 )
@@ -249,6 +253,7 @@ def offload_task(
                 if not _func:
                     _func = eval(func)  # pragma: no cover
             except NameError:
+                log_error('InvenTree.offload_task')
                 raise_warning(
                     f"WARNING: '{taskname}' not started - No function named '{func}'"
                 )
@@ -258,6 +263,7 @@ def offload_task(
         try:
             _func(*args, **kwargs)
         except Exception as exc:
+            log_error('InvenTree.offload_task')
             raise_warning(f"WARNING: '{taskname}' not started due to {str(exc)}")
             return False
 
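For context, offload_task accepts either a dotted task name or a callable plus positional arguments, as in this hypothetical sketch (the function and argument are illustrative, and the environment is assumed to be a configured InvenTree instance):

```python
from InvenTree.tasks import offload_task


def send_report(report_id: int):
    """Pretend task: generate and send a report."""
    print(f'Generating report {report_id}')


# Returns True if the task was offloaded to the background worker (or executed
# directly), False otherwise - matching the docstring shown above.
ok = offload_task(send_report, 42)
print('offloaded or ran:', ok)
```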
@@ -14,8 +14,10 @@ from django.core import mail
 from django.core.exceptions import ValidationError
 from django.test import TestCase, override_settings, tag
 from django.urls import reverse
+from django.utils import timezone
 
 import pint.errors
+import pytz
 from djmoney.contrib.exchange.exceptions import MissingRate
 from djmoney.contrib.exchange.models import Rate, convert_money
 from djmoney.money import Money
@@ -40,6 +42,147 @@ from .tasks import offload_task
 from .validators import validate_overage
 
 
+class HostTest(InvenTreeTestCase):
+    """Test for host configuration."""
+
+    @override_settings(ALLOWED_HOSTS=['testserver'])
+    def test_allowed_hosts(self):
+        """Test that the ALLOWED_HOSTS functions as expected."""
+        self.assertIn('testserver', settings.ALLOWED_HOSTS)
+
+        response = self.client.get('/api/', headers={'host': 'testserver'})
+
+        self.assertEqual(response.status_code, 200)
+
+        response = self.client.get('/api/', headers={'host': 'invalidserver'})
+
+        self.assertEqual(response.status_code, 400)
+
+    @override_settings(ALLOWED_HOSTS=['invalidserver.co.uk'])
+    def test_allowed_hosts_2(self):
+        """Another test for ALLOWED_HOSTS functionality."""
+        response = self.client.get('/api/', headers={'host': 'invalidserver.co.uk'})
+
+        self.assertEqual(response.status_code, 200)
+
+
+class CorsTest(TestCase):
+    """Unit tests for CORS functionality."""
+
+    def cors_headers(self):
+        """Return a list of CORS headers."""
+        return [
+            'access-control-allow-origin',
+            'access-control-allow-credentials',
+            'access-control-allow-methods',
+            'access-control-allow-headers',
+        ]
+
+    def preflight(self, url, origin, method='GET'):
+        """Make a CORS preflight request to the specified URL."""
+        headers = {'origin': origin, 'access-control-request-method': method}
+
+        return self.client.options(url, headers=headers)
+
+    def test_no_origin(self):
+        """Test that CORS headers are not included for regular requests.
+
+        - We use the /api/ endpoint for this test (it does not require auth)
+        - By default, in debug mode *all* CORS origins are allowed
+        """
+        # Perform an initial response without the "origin" header
+        response = self.client.get('/api/')
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertNotIn(header, response.headers)
+
+        # Now, perform a "preflight" request with the "origin" header
+        response = self.preflight('/api/', origin='http://random-external-server.com')
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertIn(header, response.headers)
+
+        self.assertEqual(response.headers['content-length'], '0')
+        self.assertEqual(
+            response.headers['access-control-allow-origin'],
+            'http://random-external-server.com',
+        )
+
+    @override_settings(
+        CORS_ALLOW_ALL_ORIGINS=False,
+        CORS_ALLOWED_ORIGINS=['http://my-external-server.com'],
+        CORS_ALLOWED_ORIGIN_REGEXES=[],
+    )
+    def test_auth_view(self):
+        """Test that CORS requests work for the /auth/ view.
+
+        Here, we are not authorized by default,
+        but the CORS headers should still be included.
+        """
+        url = '/auth/'
+
+        # First, a preflight request with a "valid" origin
+        response = self.preflight(url, origin='http://my-external-server.com')
+
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertIn(header, response.headers)
+
+        # Next, a preflight request with an "invalid" origin
+        response = self.preflight(url, origin='http://random-external-server.com')
+
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertNotIn(header, response.headers)
+
+        # Next, make a GET request (without a token)
+        response = self.client.get(
+            url, headers={'origin': 'http://my-external-server.com'}
+        )
+
+        # Unauthorized
+        self.assertEqual(response.status_code, 401)
+
+        self.assertIn('access-control-allow-origin', response.headers)
+        self.assertNotIn('access-control-allow-methods', response.headers)
+
+    @override_settings(
+        CORS_ALLOW_ALL_ORIGINS=False,
+        CORS_ALLOWED_ORIGINS=[],
+        CORS_ALLOWED_ORIGIN_REGEXES=['http://.*myserver.com'],
+    )
+    def test_cors_regex(self):
+        """Test that CORS regexes work as expected."""
+        valid_urls = [
+            'http://www.myserver.com',
+            'http://test.myserver.com',
+            'http://myserver.com',
+            'http://www.myserver.com:8080',
+        ]
+
+        invalid_urls = [
+            'http://myserver.org',
+            'http://www.other-server.org',
+            'http://google.com',
+            'http://myserver.co.uk:8080',
+        ]
+
+        for url in valid_urls:
+            response = self.preflight('/api/', origin=url)
+            self.assertEqual(response.status_code, 200)
+            self.assertIn('access-control-allow-origin', response.headers)
+
+        for url in invalid_urls:
+            response = self.preflight('/api/', origin=url)
+            self.assertEqual(response.status_code, 200)
+            self.assertNotIn('access-control-allow-origin', response.headers)
+
+
 class ConversionTest(TestCase):
     """Tests for conversion of physical units."""
 
@@ -138,6 +281,24 @@ class ConversionTest(TestCase):
         q = InvenTree.conversion.convert_physical_value(val, 'W', strip_units=False)
         self.assertAlmostEqual(float(q.magnitude), expected, places=2)
 
+    def test_imperial_lengths(self):
+        """Test support of imperial length measurements."""
+        tests = [
+            ('1 inch', 'mm', 25.4),
+            ('1 "', 'mm', 25.4),
+            ('2 "', 'inches', 2),
+            ('3 feet', 'inches', 36),
+            ("3'", 'inches', 36),
+            ("7 '", 'feet', 7),
+        ]
+
+        for val, unit, expected in tests:
+            output = InvenTree.conversion.convert_physical_value(
+                val, unit, strip_units=True
+            )
+
+            self.assertAlmostEqual(output, expected, 3)
+
     def test_dimensionless_units(self):
         """Tests for 'dimensionless' unit quantities."""
         # Test some dimensionless units
@@ -587,6 +748,47 @@ class TestHelpers(TestCase):
         self.assertEqual(helpers.generateTestKey(name), key)
 
 
+class TestTimeFormat(TestCase):
+    """Unit test for time formatting functionality."""
+
+    @override_settings(TIME_ZONE='UTC')
+    def test_tz_utc(self):
+        """Check UTC timezone."""
+        self.assertEqual(InvenTree.helpers.server_timezone(), 'UTC')
+
+    @override_settings(TIME_ZONE='Europe/London')
+    def test_tz_london(self):
+        """Check London timezone."""
+        self.assertEqual(InvenTree.helpers.server_timezone(), 'Europe/London')
+
+    @override_settings(TIME_ZONE='Australia/Sydney')
+    def test_to_local_time(self):
+        """Test that the local time conversion works as expected."""
+        source_time = timezone.datetime(
+            year=2000,
+            month=1,
+            day=1,
+            hour=0,
+            minute=0,
+            second=0,
+            tzinfo=pytz.timezone('Europe/London'),
+        )
+
+        tests = [
+            ('UTC', '2000-01-01 00:01:00+00:00'),
+            ('Europe/London', '2000-01-01 00:00:00-00:01'),
+            ('America/New_York', '1999-12-31 19:01:00-05:00'),
+            # All following tests should result in the same value
+            ('Australia/Sydney', '2000-01-01 11:01:00+11:00'),
+            (None, '2000-01-01 11:01:00+11:00'),
+            ('', '2000-01-01 11:01:00+11:00'),
+        ]
+
+        for tz, expected in tests:
+            local_time = InvenTree.helpers.to_local_time(source_time, tz)
+            self.assertEqual(str(local_time), expected)
+
+
 class TestQuoteWrap(TestCase):
     """Tests for string wrapping."""
 
@@ -894,6 +1096,7 @@ class TestVersionNumber(TestCase):
         hash = str(
             subprocess.check_output('git rev-parse --short HEAD'.split()), 'utf-8'
         ).strip()
+
         self.assertEqual(hash, version.inventreeCommitHash())
 
         d = (
@@ -19,7 +19,7 @@ from dulwich.repo import NotGitRepository, Repo
 from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION
 
 # InvenTree software version
-INVENTREE_SW_VERSION = '0.14.0 dev'
+INVENTREE_SW_VERSION = '0.15.0 dev'
 
 # Discover git
 try:
@@ -314,11 +314,21 @@ class BuildLineEndpoint:
     queryset = BuildLine.objects.all()
     serializer_class = build.serializers.BuildLineSerializer
 
+    def get_source_build(self) -> Build:
+        """Return the source Build object for the BuildLine queryset.
+
+        This source build is used to filter the available stock for each BuildLine.
+
+        - If this is a "detail" view, use the build associated with the line
+        - If this is a "list" view, use the build associated with the request
+        """
+        raise NotImplementedError("get_source_build must be implemented in the child class")
+
     def get_queryset(self):
         """Override queryset to select-related and annotate"""
         queryset = super().get_queryset()
-        queryset = build.serializers.BuildLineSerializer.annotate_queryset(queryset)
+        source_build = self.get_source_build()
+        queryset = build.serializers.BuildLineSerializer.annotate_queryset(queryset, build=source_build)
 
         return queryset
 
@@ -353,10 +363,26 @@ class BuildLineList(BuildLineEndpoint, ListCreateAPI):
         'bom_item__reference',
     ]
 
+    def get_source_build(self) -> Build:
+        """Return the target build for the BuildLine queryset."""
+        try:
+            build_id = self.request.query_params.get('build', None)
+            if build_id:
+                build = Build.objects.get(pk=build_id)
+                return build
+        except (Build.DoesNotExist, AttributeError, ValueError):
+            pass
+
+        return None
+
 
 class BuildLineDetail(BuildLineEndpoint, RetrieveUpdateDestroyAPI):
     """API endpoint for detail view of a BuildLine object."""
-    pass
+
+    def get_source_build(self) -> Build:
+        """Return the target source location for the BuildLine queryset."""
+        return None
 
 
 class BuildOrderContextMixin:
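A hypothetical client-side sketch of the new behaviour: passing a `build` query parameter to the BuildLine list endpoint scopes the stock annotations to that build's source location. The URL, port, token and build ID below are placeholders, and the endpoint path is assumed from the view above:

```python
import requests

response = requests.get(
    'http://localhost:8000/api/build/line/',
    params={'build': 1},
    headers={'Authorization': 'Token <api-token>'},
)

print(response.status_code)
# Depending on pagination settings the payload is either a plain list or a
# {'count': ..., 'results': [...]} wrapper; print it raw here.
print(response.json())
```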
@@ -518,9 +518,25 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
 
         return True
 
+    @transaction.atomic
+    def complete_allocations(self, user):
+        """Complete all stock allocations for this build order.
+
+        - This function is called when a build order is completed
+        """
+        # Remove untracked allocated stock
+        self.subtract_allocated_stock(user)
+
+        # Ensure that there are no longer any BuildItem objects
+        # which point to this Build Order
+        self.allocated_stock.delete()
+
     @transaction.atomic
     def complete_build(self, user):
         """Mark this build as complete."""
+        import build.tasks
+
         if self.incomplete_count > 0:
             return
 
@@ -529,12 +545,12 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
         self.status = BuildStatus.COMPLETE.value
         self.save()
 
-        # Remove untracked allocated stock
-        self.subtract_allocated_stock(user)
-
-        # Ensure that there are no longer any BuildItem objects
-        # which point to this Build Order
-        self.allocated_stock.delete()
+        # Offload task to complete build allocations
+        InvenTree.tasks.offload_task(
+            build.tasks.complete_build_allocations,
+            self.pk,
+            user.pk if user else None
+        )
 
         # Register an event
         trigger_event('build.completed', id=self.pk)
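The offloaded task itself is not part of this excerpt. A plausible sketch of what `build.tasks.complete_build_allocations` needs to do, given the call site above (the body is an assumption, not the actual implementation):

```python
from django.contrib.auth.models import User

from build.models import Build


def complete_build_allocations(build_id: int, user_id: int = None):
    """Look up the Build (and optional user), then complete its allocations."""
    build_order = Build.objects.get(pk=build_id)

    # The user is optional - the call site passes None when no user is available
    user = User.objects.filter(pk=user_id).first() if user_id else None

    # Delegate to the new Build.complete_allocations() method added above
    build_order.complete_allocations(user)
```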
@@ -1083,6 +1083,7 @@ class BuildLineSerializer(InvenTreeModelSerializer):
             'available_substitute_stock',
             'available_variant_stock',
             'total_available_stock',
+            'external_stock',
         ]
 
         read_only_fields = [
@@ -1124,15 +1125,23 @@ class BuildLineSerializer(InvenTreeModelSerializer):
     available_substitute_stock = serializers.FloatField(read_only=True)
     available_variant_stock = serializers.FloatField(read_only=True)
     total_available_stock = serializers.FloatField(read_only=True)
+    external_stock = serializers.FloatField(read_only=True)
 
     @staticmethod
-    def annotate_queryset(queryset):
+    def annotate_queryset(queryset, build=None):
         """Add extra annotations to the queryset:
 
         - allocated: Total stock quantity allocated against this build line
         - available: Total stock available for allocation against this build line
         - on_order: Total stock on order for this build line
         - in_production: Total stock currently in production for this build line
+
+        Arguments:
+            queryset: The queryset to annotate
+            build: The build order to filter against (optional)
+
+        Note: If the 'build' is provided, we can use it to filter available stock, depending on the specified location for the build
         """
         queryset = queryset.select_related(
             'build', 'bom_item',
@ -1169,6 +1178,18 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
|||||||
|
|
||||||
ref = 'bom_item__sub_part__'
|
ref = 'bom_item__sub_part__'
|
||||||
|
|
||||||
|
stock_filter = None
|
||||||
|
|
||||||
|
if build is not None and build.take_from is not None:
|
||||||
|
location = build.take_from
|
||||||
|
# Filter by locations below the specified location
|
||||||
|
stock_filter = Q(
|
||||||
|
location__tree_id=location.tree_id,
|
||||||
|
location__lft__gte=location.lft,
|
||||||
|
location__rght__lte=location.rght,
|
||||||
|
location__level__gte=location.level,
|
||||||
|
)
|
||||||
|
|
||||||
# Annotate the "in_production" quantity
|
# Annotate the "in_production" quantity
|
||||||
queryset = queryset.annotate(
|
queryset = queryset.annotate(
|
||||||
in_production=part.filters.annotate_in_production_quantity(reference=ref)
|
in_production=part.filters.annotate_in_production_quantity(reference=ref)
|
||||||
@ -1181,10 +1202,8 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Annotate the "available" quantity
|
# Annotate the "available" quantity
|
||||||
# TODO: In the future, this should be refactored.
|
|
||||||
# TODO: Note that part.serializers.BomItemSerializer also has a similar annotation
|
|
||||||
queryset = queryset.alias(
|
queryset = queryset.alias(
|
||||||
total_stock=part.filters.annotate_total_stock(reference=ref),
|
total_stock=part.filters.annotate_total_stock(reference=ref, filter=stock_filter),
|
||||||
allocated_to_sales_orders=part.filters.annotate_sales_order_allocations(reference=ref),
|
allocated_to_sales_orders=part.filters.annotate_sales_order_allocations(reference=ref),
|
||||||
allocated_to_build_orders=part.filters.annotate_build_order_allocations(reference=ref),
|
allocated_to_build_orders=part.filters.annotate_build_order_allocations(reference=ref),
|
||||||
)
|
)
|
||||||
@ -1197,11 +1216,21 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
external_stock_filter = Q(location__external=True)
|
||||||
|
|
||||||
|
if stock_filter:
|
||||||
|
external_stock_filter &= stock_filter
|
||||||
|
|
||||||
|
# Add 'external stock' annotations
|
||||||
|
queryset = queryset.annotate(
|
||||||
|
external_stock=part.filters.annotate_total_stock(reference=ref, filter=external_stock_filter)
|
||||||
|
)
|
||||||
|
|
||||||
ref = 'bom_item__substitutes__part__'
|
ref = 'bom_item__substitutes__part__'
|
||||||
|
|
||||||
# Extract similar information for any 'substitute' parts
|
# Extract similar information for any 'substitute' parts
|
||||||
queryset = queryset.alias(
|
queryset = queryset.alias(
|
||||||
substitute_stock=part.filters.annotate_total_stock(reference=ref),
|
substitute_stock=part.filters.annotate_total_stock(reference=ref, filter=stock_filter),
|
||||||
substitute_build_allocations=part.filters.annotate_build_order_allocations(reference=ref),
|
substitute_build_allocations=part.filters.annotate_build_order_allocations(reference=ref),
|
||||||
substitute_sales_allocations=part.filters.annotate_sales_order_allocations(reference=ref)
|
substitute_sales_allocations=part.filters.annotate_sales_order_allocations(reference=ref)
|
||||||
)
|
)
|
||||||
@ -1215,7 +1244,7 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Annotate the queryset with 'available variant stock' information
|
# Annotate the queryset with 'available variant stock' information
|
||||||
variant_stock_query = part.filters.variant_stock_query(reference='bom_item__sub_part__')
|
variant_stock_query = part.filters.variant_stock_query(reference='bom_item__sub_part__', filter=stock_filter)
|
||||||
|
|
||||||
queryset = queryset.alias(
|
queryset = queryset.alias(
|
||||||
variant_stock_total=part.filters.annotate_variant_quantity(variant_stock_query, reference='quantity'),
|
variant_stock_total=part.filters.annotate_variant_quantity(variant_stock_query, reference='quantity'),
|
||||||
|
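Aside (not part of the diff): the stock_filter built above relies on the MPTT tree fields (tree_id, lft, rght, level) to match every stock location at or below the build's take_from location. A minimal sketch of composing such a Q object, with a hypothetical location object standing in for a real StockLocation:

from dataclasses import dataclass

from django.db.models import Q


@dataclass
class FakeLocation:
    """Hypothetical stand-in exposing the MPTT fields used by the filter."""
    tree_id: int
    lft: int
    rght: int
    level: int


location = FakeLocation(tree_id=3, lft=10, rght=25, level=2)

# Matches stock items whose location sits inside the subtree rooted at `location`
subtree = Q(
    location__tree_id=location.tree_id,
    location__lft__gte=location.lft,
    location__rght__lte=location.rght,
    location__level__gte=location.level,
)

# Further restrictions compose with '&', as the serializer does for external stock
external_in_subtree = subtree & Q(location__external=True)
print(external_in_subtree)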
@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
 from decimal import Decimal
 import logging

+from django.contrib.auth.models import User
 from django.utils.translation import gettext_lazy as _
 from django.template.loader import render_to_string

@@ -24,6 +25,27 @@ import part.models as part_models
 logger = logging.getLogger('inventree')


+def complete_build_allocations(build_id: int, user_id: int):
+    """Complete build allocations for a specified BuildOrder."""
+
+    build_order = build.models.Build.objects.filter(pk=build_id).first()
+
+    if user_id:
+        try:
+            user = User.objects.get(pk=user_id)
+        except User.DoesNotExist:
+            logger.warning("Could not complete build allocations for BuildOrder <%s> - User does not exist", build_id)
+            return
+    else:
+        user = None
+
+    if not build_order:
+        logger.warning("Could not complete build allocations for BuildOrder <%s> - BuildOrder does not exist", build_id)
+        return
+
+    build_order.complete_allocations(user)
+
+
 def update_build_order_lines(bom_item_pk: int):
     """Update all BuildOrderLineItem objects which reference a particular BomItem.

@@ -200,6 +200,11 @@
     <div id='build-lines-toolbar'>
         {% include "filter_list.html" with id='buildlines' %}
     </div>
+    {% if build.take_from %}
+    <div class='alert alert-block alert-info'>
+        {% trans "Available stock has been filtered based on specified source location for this build order" %}
+    </div>
+    {% endif %}
     <table class='table table-striped table-condensed' id='build-lines-table' data-toolbar='#build-lines-toolbar'></table>
 </div>
 </div>
@@ -374,6 +379,9 @@ onPanelLoad('allocate', function() {
         "#build-lines-table",
         {{ build.pk }},
         {
+            {% if build.take_from %}
+            location: {{ build.take_from.pk }},
+            {% endif %}
             {% if build.project_code %}
             project_code: {{ build.project_code.pk }},
             {% endif %}
@@ -226,9 +226,12 @@ class BaseInvenTreeSetting(models.Model):
         """
         cache_key = f'BUILD_DEFAULT_VALUES:{str(cls.__name__)}'

+        try:
             if InvenTree.helpers.str2bool(cache.get(cache_key, False)):
                 # Already built default values
                 return
+        except Exception:
+            pass

         try:
             existing_keys = cls.objects.filter(**kwargs).values_list('key', flat=True)
@@ -251,7 +254,10 @@ class BaseInvenTreeSetting(models.Model):
             )
             pass

+        try:
             cache.set(cache_key, True, timeout=3600)
+        except Exception:
+            pass

     def _call_settings_function(self, reference: str, args, kwargs):
         """Call a function associated with a particular setting.
@@ -290,8 +296,7 @@ class BaseInvenTreeSetting(models.Model):

         try:
             cache.set(ckey, self, timeout=3600)
-        except TypeError:
-            # Some characters cause issues with caching; ignore and move on
+        except Exception:
             pass

     @classmethod
@@ -554,16 +559,18 @@ class BaseInvenTreeSetting(models.Model):
         # Unless otherwise specified, attempt to create the setting
         create = kwargs.pop('create', True)

+        # Perform cache lookup by default
+        do_cache = kwargs.pop('cache', True)
+
         # Prevent saving to the database during data import
         if InvenTree.ready.isImportingData():
             create = False
+            do_cache = False

         # Prevent saving to the database during migrations
         if InvenTree.ready.isRunningMigrations():
             create = False
+            do_cache = False

-        # Perform cache lookup by default
-        do_cache = kwargs.pop('cache', True)

         ckey = cls.create_cache_key(key, **kwargs)

@@ -575,7 +582,7 @@ class BaseInvenTreeSetting(models.Model):
                 if cached_setting is not None:
                     return cached_setting

-        except AppRegistryNotReady:
+        except Exception:
             # Cache is not ready yet
             do_cache = False

@@ -1646,6 +1653,12 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': False,
             'validator': bool,
         },
+        'REPORT_LOG_ERRORS': {
+            'name': _('Log Report Errors'),
+            'description': _('Log errors which occur when generating reports'),
+            'default': False,
+            'validator': bool,
+        },
         'REPORT_DEFAULT_PAGE_SIZE': {
             'name': _('Page Size'),
             'description': _('Default page size for PDF reports'),
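Aside (not part of the diff): every hunk above applies the same defensive rule - any cache-backend failure is treated as a cache miss, so settings access falls back to the database instead of raising. A generic sketch of that pattern using Django's low-level cache API (it assumes a configured Django project; the key and loader names are made up):

from django.core.cache import cache


def get_cached(key, loader, timeout=3600):
    """Return a cached value if possible, falling back to loader()."""
    try:
        cached = cache.get(key)
    except Exception:
        cached = None  # a broken cache backend is treated as a miss

    if cached is not None:
        return cached

    value = loader()

    try:
        cache.set(key, value, timeout=timeout)
    except Exception:
        pass  # caching is best-effort; never let it break the lookup

    return value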
@@ -14,7 +14,10 @@ def currency_code_default():
     """Returns the default currency code (or USD if not specified)."""
     from common.models import InvenTreeSetting

+    try:
         cached_value = cache.get('currency_code_default', '')
+    except Exception:
+        cached_value = None

     if cached_value:
         return cached_value
@@ -31,7 +34,10 @@ def currency_code_default():
         code = 'USD'  # pragma: no cover

     # Cache the value for a short amount of time
+    try:
         cache.set('currency_code_default', code, 30)
+    except Exception:
+        pass

     return code
@@ -42,11 +42,13 @@ class CompanyBriefSerializer(InvenTreeModelSerializer):
         """Metaclass options."""

         model = Company
-        fields = ['pk', 'url', 'name', 'description', 'image']
+        fields = ['pk', 'url', 'name', 'description', 'image', 'thumbnail']

     url = serializers.CharField(source='get_absolute_url', read_only=True)

-    image = serializers.CharField(source='get_thumbnail_url', read_only=True)
+    image = InvenTreeImageSerializerField(read_only=True)
+
+    thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)


 class AddressSerializer(InvenTreeModelSerializer):
@@ -168,9 +168,9 @@ allowed_hosts:

 # Trusted origins (see CSRF_TRUSTED_ORIGINS in Django settings documentation)
 # If you are running behind a proxy, you may need to add the proxy address here
-trusted_origins:
-  - 'http://localhost:8000'
+# trusted_origins:
+#   - 'http://localhost'
+#   - 'http://*.localhost'

 # Proxy forwarding settings
 # If InvenTree is running behind a proxy, you may need to configure these settings
@@ -183,24 +183,23 @@ use_x_forwarded_port: false

 # Cross Origin Resource Sharing (CORS) settings (see https://github.com/adamchainz/django-cors-headers)
 cors:
-  allow_all: True
-  allow_credentials: True,
+  allow_all: true
+  allow_credentials: true

   # whitelist:
   # - https://example.com
   # - https://sub.example.com

+  # regex:
+
 # MEDIA_ROOT is the local filesystem location for storing uploaded files
 #media_root: '/home/inventree/data/media'

 # STATIC_ROOT is the local filesystem location for storing static files
 #static_root: '/home/inventree/data/static'

-### Backup configuration options ###
 # INVENTREE_BACKUP_DIR is the local filesystem location for storing backups
-backup_storage: django.core.files.storage.FileSystemStorage
 #backup_dir: '/home/inventree/data/backup'
-#backup_options:

 # Background worker options
 background:
@@ -15,7 +15,16 @@ class LabelSerializerBase(InvenTreeModelSerializer):
     @staticmethod
     def label_fields():
         """Generic serializer fields for a label template."""
-        return ['pk', 'name', 'description', 'label', 'filters', 'enabled']
+        return [
+            'pk',
+            'name',
+            'description',
+            'label',
+            'filters',
+            'width',
+            'height',
+            'enabled',
+        ]


 class StockItemLabelSerializer(LabelSerializerBase):
(Diffs for a number of additional files are suppressed here because they are too large.)
@@ -144,7 +144,9 @@ class MachineRestart(APIView):

     permission_classes = [permissions.IsAuthenticated]

-    @extend_schema(responses={200: MachineSerializers.MachineRestartSerializer()})
+    @extend_schema(
+        request=None, responses={200: MachineSerializers.MachineRestartSerializer()}
+    )
     def post(self, request, pk):
         """Restart machine by pk."""
         machine = get_machine(pk)
@@ -5,7 +5,16 @@ from decimal import Decimal

 from django.core.exceptions import ValidationError as DjangoValidationError
 from django.db import models, transaction
-from django.db.models import BooleanField, Case, ExpressionWrapper, F, Q, Value, When
+from django.db.models import (
+    BooleanField,
+    Case,
+    ExpressionWrapper,
+    F,
+    Prefetch,
+    Q,
+    Value,
+    When,
+)
 from django.utils.translation import gettext_lazy as _

 from rest_framework import serializers
@@ -14,6 +23,8 @@ from sql_util.utils import SubqueryCount

 import order.models
 import part.filters
+import part.filters as part_filters
+import part.models as part_models
 import stock.models
 import stock.serializers
 from common.serializers import ProjectCodeSerializer
@@ -375,6 +386,17 @@ class PurchaseOrderLineItemSerializer(InvenTreeModelSerializer):
         - "total_price" = purchase_price * quantity
         - "overdue" status (boolean field)
         """
+        queryset = queryset.prefetch_related(
+            Prefetch(
+                'part__part',
+                queryset=part_models.Part.objects.annotate(
+                    category_default_location=part_filters.annotate_default_location(
+                        'category__'
+                    )
+                ).prefetch_related(None),
+            )
+        )
+
         queryset = queryset.annotate(
             total_price=ExpressionWrapper(
                 F('purchase_price') * F('quantity'), output_field=models.DecimalField()
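Aside (not part of the diff): Prefetch with a custom queryset, as used above, lets the related Part rows arrive already annotated, and calling .prefetch_related(None) on the inner queryset clears any nested prefetches it would otherwise drag in. The sketch below shows only the general shape of that call; Author, Book, the 'reviews' relation and the import are hypothetical placeholders, not InvenTree models:

from django.db.models import Count, Prefetch

# Hypothetical models: Author has a reverse relation 'books',
# and each Book has a reverse relation 'reviews'.
from library.models import Author, Book  # placeholder import


def authors_with_annotated_books():
    """Each prefetched Book arrives with a 'review_count' annotation attached."""
    return Author.objects.prefetch_related(
        Prefetch(
            'books',
            queryset=Book.objects.annotate(review_count=Count('reviews'))
            .prefetch_related(None),  # drop any default nested prefetches
        )
    )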
@@ -1767,6 +1767,7 @@ class BomFilter(rest_filters.FilterSet):
     part_active = rest_filters.BooleanFilter(
         label='Master part is active', field_name='part__active'
     )

     part_trackable = rest_filters.BooleanFilter(
         label='Master part is trackable', field_name='part__trackable'
     )
@@ -1775,6 +1776,7 @@ class BomFilter(rest_filters.FilterSet):
     sub_part_trackable = rest_filters.BooleanFilter(
         label='Sub part is trackable', field_name='sub_part__trackable'
     )

     sub_part_assembly = rest_filters.BooleanFilter(
         label='Sub part is an assembly', field_name='sub_part__assembly'
     )
@@ -1814,6 +1816,22 @@ class BomFilter(rest_filters.FilterSet):

         return queryset.filter(q_a | q_b).distinct()

+    part = rest_filters.ModelChoiceFilter(
+        queryset=Part.objects.all(), method='filter_part', label=_('Part')
+    )
+
+    def filter_part(self, queryset, name, part):
+        """Filter the queryset based on the specified part."""
+        return queryset.filter(part.get_bom_item_filter())
+
+    uses = rest_filters.ModelChoiceFilter(
+        queryset=Part.objects.all(), method='filter_uses', label=_('Uses')
+    )
+
+    def filter_uses(self, queryset, name, part):
+        """Filter the queryset based on the specified part."""
+        return queryset.filter(part.get_used_in_bom_item_filter())
+
+
 class BomMixin:
     """Mixin class for BomItem API endpoints."""
@@ -1889,62 +1907,6 @@ class BomList(BomMixin, ListCreateDestroyAPIView):
             return JsonResponse(data, safe=False)

         return Response(data)

-    def filter_queryset(self, queryset):
-        """Custom query filtering for the BomItem list API."""
-        queryset = super().filter_queryset(queryset)
-
-        params = self.request.query_params
-
-        # Filter by part?
-        part = params.get('part', None)
-
-        if part is not None:
-            """
-            If we are filtering by "part", there are two cases to consider:
-
-            a) Bom items which are defined for *this* part
-            b) Inherited parts which are defined for a *parent* part
-
-            So we need to construct two queries!
-            """
-
-            # First, check that the part is actually valid!
-            try:
-                part = Part.objects.get(pk=part)
-
-                queryset = queryset.filter(part.get_bom_item_filter())
-
-            except (ValueError, Part.DoesNotExist):
-                pass
-
-        """
-        Filter by 'uses'?
-
-        Here we pass a part ID and return BOM items for any assemblies which "use" (or "require") that part.
-
-        There are multiple ways that an assembly can "use" a sub-part:
-
-        A) Directly specifying the sub_part in a BomItem field
-        B) Specifying a "template" part with inherited=True
-        C) Allowing variant parts to be substituted
-        D) Allowing direct substitute parts to be specified
-
-        - BOM items which are "inherited" by parts which are variants of the master BomItem
-        """
-        uses = params.get('uses', None)
-
-        if uses is not None:
-            try:
-                # Extract the part we are interested in
-                uses_part = Part.objects.get(pk=uses)
-
-                queryset = queryset.filter(uses_part.get_used_in_bom_item_filter())
-
-            except (ValueError, Part.DoesNotExist):
-                pass
-
-        return queryset
-
     filter_backends = SEARCH_ORDER_FILTER_ALIAS

     search_fields = [
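Aside (not part of the diff): with part and uses promoted to declarative ModelChoiceFilter fields, the hand-rolled filter_queryset removed above becomes redundant and the API is driven purely by query parameters. Assuming the BOM list is served at /api/bom/ with token authentication (neither is shown in the diff), client usage would look roughly like:

import requests

BASE_URL = "https://inventree.example.com"  # hypothetical server
session = requests.Session()
session.headers["Authorization"] = "Token <api-token>"  # placeholder credential

# BOM items defined against part 57 (via Part.get_bom_item_filter)
bom_for_part = session.get(f"{BASE_URL}/api/bom/", params={"part": 57})

# BOM items of any assembly which *uses* part 57 (via Part.get_used_in_bom_item_filter)
bom_using_part = session.get(f"{BASE_URL}/api/bom/", params={"uses": 57})

print(bom_for_part.status_code, bom_using_part.status_code)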
@@ -107,7 +107,7 @@ def annotate_on_order_quantity(reference: str = ''):
     )


-def annotate_total_stock(reference: str = ''):
+def annotate_total_stock(reference: str = '', filter: Q = None):
     """Annotate 'total stock' quantity against a queryset.

     - This function calculates the 'total stock' for a given part
@@ -121,6 +121,9 @@ def annotate_total_stock(reference: str = ''):
     # Stock filter only returns 'in stock' items
     stock_filter = stock.models.StockItem.IN_STOCK_FILTER

+    if filter is not None:
+        stock_filter &= filter
+
     return Coalesce(
         SubquerySum(f'{reference}stock_items__quantity', filter=stock_filter),
         Decimal(0),
@@ -216,9 +219,7 @@ def annotate_sales_order_allocations(reference: str = ''):
     )


-def variant_stock_query(
-    reference: str = '', filter: Q = stock.models.StockItem.IN_STOCK_FILTER
-):
+def variant_stock_query(reference: str = '', filter: Q = None):
     """Create a queryset to retrieve all stock items for variant parts under the specified part.

     - Useful for annotating a queryset with aggregated information about variant parts
@@ -227,11 +228,16 @@ def variant_stock_query(
         reference: The relationship reference of the part from the current model
         filter: Q object which defines how to filter the returned StockItem instances
     """
+    stock_filter = stock.models.StockItem.IN_STOCK_FILTER
+
+    if filter:
+        stock_filter &= filter
+
     return stock.models.StockItem.objects.filter(
         part__tree_id=OuterRef(f'{reference}tree_id'),
         part__lft__gt=OuterRef(f'{reference}lft'),
         part__rght__lt=OuterRef(f'{reference}rght'),
-    ).filter(filter)
+    ).filter(stock_filter)


 def annotate_variant_quantity(subquery: Q, reference: str = 'quantity'):
@@ -281,6 +287,32 @@ def annotate_category_parts():
     )


+def annotate_default_location(reference=''):
+    """Construct a queryset that finds the closest default location in the part's category tree.
+
+    If the part's category has its own default_location, this is returned.
+    If not, the category tree is traversed until a value is found.
+    """
+    subquery = part.models.PartCategory.objects.filter(
+        tree_id=OuterRef(f'{reference}tree_id'),
+        lft__lt=OuterRef(f'{reference}lft'),
+        rght__gt=OuterRef(f'{reference}rght'),
+        level__lte=OuterRef(f'{reference}level'),
+        parent__isnull=False,
+    )
+
+    return Coalesce(
+        F(f'{reference}default_location'),
+        Subquery(
+            subquery.order_by('-level')
+            .filter(default_location__isnull=False)
+            .values('default_location')
+        ),
+        Value(None),
+        output_field=IntegerField(),
+    )
+
+
 def annotate_sub_categories():
     """Construct a queryset annotation which returns the number of subcategories for each provided category."""
     subquery = part.models.PartCategory.objects.filter(
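Aside (not part of the diff): the optional filter argument lets callers layer arbitrary Q restrictions on top of IN_STOCK_FILTER, which is how the serializers above derive 'external stock' and location-scoped availability from the same helper. A small sketch of a caller composing those pieces (the queryset itself and its model are assumed, not shown in the diff):

from django.db.models import Q

import part.filters  # helper module as referenced throughout the diff


def annotate_stock_views(queryset, location_subtree: Q = None):
    """Annotate a part queryset with total and 'external only' stock figures."""
    external_q = Q(location__external=True)
    if location_subtree is not None:
        external_q &= location_subtree  # restrict both views to the same subtree

    return queryset.annotate(
        total_stock=part.filters.annotate_total_stock(filter=location_subtree),
        external_stock=part.filters.annotate_total_stock(filter=external_q),
    )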
@@ -138,6 +138,8 @@ def update_parameter_values(apps, schema_editor):

 class Migration(migrations.Migration):

+    atomic = False
+
     dependencies = [
         ('part', '0108_auto_20230516_1334'),
     ]
@@ -3428,6 +3428,13 @@ class PartTestTemplate(InvenTree.models.InvenTreeMetadataModel):

         self.key = helpers.generateTestKey(self.test_name)

+        if len(self.key) == 0:
+            raise ValidationError({
+                'test_name': _(
+                    'Invalid template name - must include at least one alphanumeric character'
+                )
+            })
+
         self.validate_unique()
         super().clean()

@@ -3445,7 +3452,9 @@ class PartTestTemplate(InvenTree.models.InvenTreeMetadataModel):

         if tests.exists():
             raise ValidationError({
-                'test_name': _('Test with this name already exists for this part')
+                'test_name': _(
+                    'Test template with the same key already exists for part'
+                )
             })

         super().validate_unique(exclude)
@@ -74,12 +74,14 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
             'level',
             'parent',
             'part_count',
+            'subcategories',
             'pathstring',
             'path',
             'starred',
             'url',
             'structural',
             'icon',
+            'parent_default_location',
         ]

     def __init__(self, *args, **kwargs):
@@ -99,13 +101,22 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
     def annotate_queryset(queryset):
         """Annotate extra information to the queryset."""
         # Annotate the number of 'parts' which exist in each category (including subcategories!)
-        queryset = queryset.annotate(part_count=part.filters.annotate_category_parts())
+        queryset = queryset.annotate(
+            part_count=part.filters.annotate_category_parts(),
+            subcategories=part.filters.annotate_sub_categories(),
+        )
+
+        queryset = queryset.annotate(
+            parent_default_location=part.filters.annotate_default_location('parent__')
+        )

         return queryset

     url = serializers.CharField(source='get_absolute_url', read_only=True)

-    part_count = serializers.IntegerField(read_only=True)
+    part_count = serializers.IntegerField(read_only=True, label=_('Parts'))
+
+    subcategories = serializers.IntegerField(read_only=True, label=_('Subcategories'))

     level = serializers.IntegerField(read_only=True)

@@ -115,6 +126,8 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
         child=serializers.DictField(), source='get_path', read_only=True
     )

+    parent_default_location = serializers.IntegerField(read_only=True)
+

 class CategoryTree(InvenTree.serializers.InvenTreeModelSerializer):
     """Serializer for PartCategory tree."""
@@ -277,11 +290,13 @@ class PartBriefSerializer(InvenTree.serializers.InvenTreeModelSerializer):
             'pk',
             'IPN',
             'barcode_hash',
+            'category_default_location',
             'default_location',
             'name',
             'revision',
             'full_name',
             'description',
+            'image',
             'thumbnail',
             'active',
             'assembly',
@@ -307,6 +322,9 @@ class PartBriefSerializer(InvenTree.serializers.InvenTreeModelSerializer):
             self.fields.pop('pricing_min')
             self.fields.pop('pricing_max')

+    category_default_location = serializers.IntegerField(read_only=True)
+
+    image = InvenTree.serializers.InvenTreeImageSerializerField(read_only=True)
     thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)

     # Pricing fields
@@ -603,6 +621,7 @@ class PartSerializer(
             'allocated_to_build_orders',
             'allocated_to_sales_orders',
             'building',
+            'category_default_location',
             'in_stock',
             'ordering',
             'required_for_build_orders',
@@ -610,6 +629,7 @@ class PartSerializer(
             'stock_item_count',
             'suppliers',
             'total_in_stock',
+            'external_stock',
             'unallocated_stock',
             'variant_stock',
             # Fields only used for Part creation
@@ -734,6 +754,12 @@ class PartSerializer(
             )
         )

+        queryset = queryset.annotate(
+            external_stock=part.filters.annotate_total_stock(
+                filter=Q(location__external=True)
+            )
+        )
+
         # Annotate with the total 'available stock' quantity
         # This is the current stock, minus any allocations
         queryset = queryset.annotate(
@@ -751,6 +777,12 @@ class PartSerializer(
             required_for_sales_orders=part.filters.annotate_sales_order_requirements(),
         )

+        queryset = queryset.annotate(
+            category_default_location=part.filters.annotate_default_location(
+                'category__'
+            )
+        )
+
         return queryset

     def get_starred(self, part) -> bool:
@@ -780,14 +812,18 @@ class PartSerializer(
     allocated_to_sales_orders = serializers.FloatField(read_only=True)
     building = serializers.FloatField(read_only=True)
     in_stock = serializers.FloatField(read_only=True)
-    ordering = serializers.FloatField(read_only=True)
+    ordering = serializers.FloatField(read_only=True, label=_('On Order'))
     required_for_build_orders = serializers.IntegerField(read_only=True)
     required_for_sales_orders = serializers.IntegerField(read_only=True)
-    stock_item_count = serializers.IntegerField(read_only=True)
-    suppliers = serializers.IntegerField(read_only=True)
-    total_in_stock = serializers.FloatField(read_only=True)
-    unallocated_stock = serializers.FloatField(read_only=True)
-    variant_stock = serializers.FloatField(read_only=True)
+    stock_item_count = serializers.IntegerField(read_only=True, label=_('Stock Items'))
+    suppliers = serializers.IntegerField(read_only=True, label=_('Suppliers'))
+    total_in_stock = serializers.FloatField(read_only=True, label=_('Total Stock'))
+    external_stock = serializers.FloatField(read_only=True, label=_('External Stock'))
+    unallocated_stock = serializers.FloatField(
+        read_only=True, label=_('Unallocated Stock')
+    )
+    category_default_location = serializers.IntegerField(read_only=True)
+    variant_stock = serializers.FloatField(read_only=True, label=_('Variant Stock'))

     minimum_stock = serializers.FloatField()

@@ -1387,6 +1423,7 @@ class BomItemSerializer(InvenTree.serializers.InvenTreeModelSerializer):
             'available_stock',
             'available_substitute_stock',
             'available_variant_stock',
+            'external_stock',
             # Annotated field describing quantity on order
             'on_order',
             # Annotated field describing quantity being built
@@ -1456,6 +1493,8 @@ class BomItemSerializer(InvenTree.serializers.InvenTreeModelSerializer):
     available_substitute_stock = serializers.FloatField(read_only=True)
     available_variant_stock = serializers.FloatField(read_only=True)

+    external_stock = serializers.FloatField(read_only=True)
+
     @staticmethod
     def setup_eager_loading(queryset):
         """Prefetch against the provided queryset to speed up database access."""
@@ -1534,6 +1573,13 @@ class BomItemSerializer(InvenTree.serializers.InvenTreeModelSerializer):
             )
         )

+        # Calculate 'external_stock'
+        queryset = queryset.annotate(
+            external_stock=part.filters.annotate_total_stock(
+                reference=ref, filter=Q(location__external=True)
+            )
+        )
+
         ref = 'substitutes__part__'

         # Extract similar information for any 'substitute' parts
@@ -431,6 +431,29 @@ class TestTemplateTest(TestCase):

         self.assertEqual(variant.getTestTemplates().count(), n + 1)

+    def test_key_generation(self):
+        """Test the key generation method."""
+        variant = Part.objects.get(pk=10004)
+
+        invalid_names = ['', '+', '+++++++', ' ', '<>$&&&']
+
+        for name in invalid_names:
+            template = PartTestTemplate(part=variant, test_name=name)
+            with self.assertRaises(ValidationError):
+                template.clean()
+
+        valid_names = [
+            'Собранный щит',
+            '!! 123 Собранный щит <><><> $$$$$ !!!',
+            '----hello world----',
+            'Olá Mundo',
+            '我不懂中文',
+        ]
+
+        for name in valid_names:
+            template = PartTestTemplate(part=variant, test_name=name)
+            template.clean()
+
+
 class PartSettingsTest(InvenTreeTestCase):
     """Tests to ensure that the user-configurable default values work as expected.
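Aside (not part of the diff): the test above depends on helpers.generateTestKey() reducing a display name to a key, with clean() rejecting any name whose key comes out empty. The real helper is not shown in the diff; a hypothetical reduction with the same observable behaviour for the names listed in the test would be:

def hypothetical_test_key(name: str) -> str:
    """Keep only alphanumeric characters (Unicode-aware), lower-cased."""
    return ''.join(ch for ch in name if ch.isalnum()).lower()


# Names taken from the test above
assert hypothetical_test_key('+++++++') == ''           # rejected by clean()
assert hypothetical_test_key('<>$&&&') == ''             # rejected by clean()
assert hypothetical_test_key('----hello world----') == 'helloworld'
assert hypothetical_test_key('Собранный щит') != ''      # accepted
assert hypothetical_test_key('我不懂中文') != ''          # accepted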
@@ -17,31 +17,14 @@ import common.models
 import InvenTree.helpers
 import order.models
 import part.models
+import report.models
+import report.serializers
 from InvenTree.api import MetadataView
 from InvenTree.exceptions import log_error
 from InvenTree.filters import InvenTreeSearchFilter
 from InvenTree.mixins import ListCreateAPI, RetrieveAPI, RetrieveUpdateDestroyAPI
 from stock.models import StockItem, StockItemAttachment, StockLocation

-from .models import (
-    BillOfMaterialsReport,
-    BuildReport,
-    PurchaseOrderReport,
-    ReturnOrderReport,
-    SalesOrderReport,
-    StockLocationReport,
-    TestReport,
-)
-from .serializers import (
-    BOMReportSerializer,
-    BuildReportSerializer,
-    PurchaseOrderReportSerializer,
-    ReturnOrderReportSerializer,
-    SalesOrderReportSerializer,
-    StockLocationReportSerializer,
-    TestReportSerializer,
-)


 class ReportListView(ListCreateAPI):
     """Generic API class for report templates."""
@@ -264,6 +247,11 @@ class ReportPrintMixin:

         except Exception as exc:
             # Log the exception to the database
+            if InvenTree.helpers.str2bool(
+                common.models.InvenTreeSetting.get_setting(
+                    'REPORT_LOG_ERRORS', cache=False
+                )
+            ):
                 log_error(request.path)

             # Re-throw the exception to the client as a DRF exception
@@ -287,8 +275,8 @@ class StockItemTestReportMixin(ReportFilterMixin):

     ITEM_MODEL = StockItem
     ITEM_KEY = 'item'
-    queryset = TestReport.objects.all()
-    serializer_class = TestReportSerializer
+    queryset = report.models.TestReport.objects.all()
+    serializer_class = report.serializers.TestReportSerializer


 class StockItemTestReportList(StockItemTestReportMixin, ReportListView):
@@ -338,8 +326,8 @@ class BOMReportMixin(ReportFilterMixin):
     ITEM_MODEL = part.models.Part
     ITEM_KEY = 'part'

-    queryset = BillOfMaterialsReport.objects.all()
-    serializer_class = BOMReportSerializer
+    queryset = report.models.BillOfMaterialsReport.objects.all()
+    serializer_class = report.serializers.BOMReportSerializer


 class BOMReportList(BOMReportMixin, ReportListView):
@@ -372,8 +360,8 @@ class BuildReportMixin(ReportFilterMixin):
     ITEM_MODEL = build.models.Build
     ITEM_KEY = 'build'

-    queryset = BuildReport.objects.all()
-    serializer_class = BuildReportSerializer
+    queryset = report.models.BuildReport.objects.all()
+    serializer_class = report.serializers.BuildReportSerializer


 class BuildReportList(BuildReportMixin, ReportListView):
@@ -406,8 +394,8 @@ class PurchaseOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.PurchaseOrder
     ITEM_KEY = 'order'

-    queryset = PurchaseOrderReport.objects.all()
-    serializer_class = PurchaseOrderReportSerializer
+    queryset = report.models.PurchaseOrderReport.objects.all()
+    serializer_class = report.serializers.PurchaseOrderReportSerializer


 class PurchaseOrderReportList(PurchaseOrderReportMixin, ReportListView):
@@ -434,8 +422,8 @@ class SalesOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.SalesOrder
     ITEM_KEY = 'order'

-    queryset = SalesOrderReport.objects.all()
-    serializer_class = SalesOrderReportSerializer
+    queryset = report.models.SalesOrderReport.objects.all()
+    serializer_class = report.serializers.SalesOrderReportSerializer


 class SalesOrderReportList(SalesOrderReportMixin, ReportListView):
@@ -462,8 +450,8 @@ class ReturnOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.ReturnOrder
     ITEM_KEY = 'order'

-    queryset = ReturnOrderReport.objects.all()
-    serializer_class = ReturnOrderReportSerializer
+    queryset = report.models.ReturnOrderReport.objects.all()
+    serializer_class = report.serializers.ReturnOrderReportSerializer


 class ReturnOrderReportList(ReturnOrderReportMixin, ReportListView):
@@ -489,8 +477,8 @@ class StockLocationReportMixin(ReportFilterMixin):

     ITEM_MODEL = StockLocation
     ITEM_KEY = 'location'
-    queryset = StockLocationReport.objects.all()
-    serializer_class = StockLocationReportSerializer
+    queryset = report.models.StockLocationReport.objects.all()
+    serializer_class = report.serializers.StockLocationReportSerializer


 class StockLocationReportList(StockLocationReportMixin, ReportListView):
@@ -511,7 +499,57 @@ class StockLocationReportPrint(StockLocationReportMixin, ReportPrintMixin, Retri
     pass


+class ReportSnippetList(ListCreateAPI):
+    """API endpoint for listing ReportSnippet objects."""
+
+    queryset = report.models.ReportSnippet.objects.all()
+    serializer_class = report.serializers.ReportSnippetSerializer
+
+
+class ReportSnippetDetail(RetrieveUpdateDestroyAPI):
+    """API endpoint for a single ReportSnippet object."""
+
+    queryset = report.models.ReportSnippet.objects.all()
+    serializer_class = report.serializers.ReportSnippetSerializer
+
+
+class ReportAssetList(ListCreateAPI):
+    """API endpoint for listing ReportAsset objects."""
+
+    queryset = report.models.ReportAsset.objects.all()
+    serializer_class = report.serializers.ReportAssetSerializer
+
+
+class ReportAssetDetail(RetrieveUpdateDestroyAPI):
+    """API endpoint for a single ReportAsset object."""
+
+    queryset = report.models.ReportAsset.objects.all()
+    serializer_class = report.serializers.ReportAssetSerializer
+
+
 report_api_urls = [
+    # Report assets
+    path(
+        'asset/',
+        include([
+            path(
+                '<int:pk>/', ReportAssetDetail.as_view(), name='api-report-asset-detail'
+            ),
+            path('', ReportAssetList.as_view(), name='api-report-asset-list'),
+        ]),
+    ),
+    # Report snippets
+    path(
+        'snippet/',
+        include([
+            path(
+                '<int:pk>/',
+                ReportSnippetDetail.as_view(),
+                name='api-report-snippet-detail',
+            ),
+            path('', ReportSnippetList.as_view(), name='api-report-snippet-list'),
+        ]),
+    ),
     # Purchase order reports
     path(
         'po/',
@@ -528,7 +566,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'model': PurchaseOrderReport},
+                        {'model': report.models.PurchaseOrderReport},
                         name='api-po-report-metadata',
                     ),
                     path(
@@ -558,7 +596,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'model': SalesOrderReport},
+                        {'model': report.models.SalesOrderReport},
                         name='api-so-report-metadata',
                     ),
                     path(
@@ -586,7 +624,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'model': ReturnOrderReport},
+                        {'model': report.models.ReturnOrderReport},
                         name='api-so-report-metadata',
                     ),
                     path(
@@ -617,7 +655,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'model': BuildReport},
+                        {'model': report.models.BuildReport},
                         name='api-build-report-metadata',
                     ),
                     path(
@@ -645,7 +683,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'model': BillOfMaterialsReport},
+                        {'model': report.models.BillOfMaterialsReport},
                         name='api-bom-report-metadata',
                     ),
                     path('', BOMReportDetail.as_view(), name='api-bom-report-detail'),
@@ -671,7 +709,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'report': TestReport},
+                        {'report': report.models.TestReport},
                         name='api-stockitem-testreport-metadata',
                     ),
                     path(
@@ -705,7 +743,7 @@ report_api_urls = [
                     path(
                         'metadata/',
                         MetadataView.as_view(),
-                        {'report': StockLocationReport},
+                        {'report': report.models.StockLocationReport},
                         name='api-stocklocation-report-metadata',
                     ),
                     path(
@ -7,6 +7,7 @@ import sys
|
|||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import FileExtensionValidator
|
from django.core.validators import FileExtensionValidator
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.template import Context, Template
|
from django.template import Context, Template
|
||||||
@ -585,10 +586,7 @@ class ReturnOrderReport(ReportTemplateBase):
|
|||||||
|
|
||||||
def rename_snippet(instance, filename):
|
def rename_snippet(instance, filename):
|
||||||
"""Function to rename a report snippet once uploaded."""
|
"""Function to rename a report snippet once uploaded."""
|
||||||
filename = os.path.basename(filename)
|
path = ReportSnippet.snippet_path(filename)
|
||||||
|
|
||||||
path = os.path.join('report', 'snippets', filename)
|
|
||||||
|
|
||||||
fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
|
fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
|
||||||
|
|
||||||
# If the snippet file is the *same* filename as the one being uploaded,
|
# If the snippet file is the *same* filename as the one being uploaded,
|
||||||
@@ -610,6 +608,40 @@ class ReportSnippet(models.Model):
     Useful for 'common' template actions, sub-templates, etc
     """

+    def __str__(self) -> str:
+        """String representation of a ReportSnippet instance."""
+        return f'snippets/{self.filename}'
+
+    @property
+    def filename(self):
+        """Return the filename of the asset."""
+        path = self.snippet.name
+        if path:
+            return os.path.basename(path)
+        else:
+            return '-'
+
+    @staticmethod
+    def snippet_path(filename):
+        """Return the fully-qualified snippet path for the given filename."""
+        return os.path.join('report', 'snippets', os.path.basename(str(filename)))
+
+    def validate_unique(self, exclude=None):
+        """Validate that this report asset is unique."""
+        proposed_path = self.snippet_path(self.snippet)
+
+        if (
+            ReportSnippet.objects.filter(snippet=proposed_path)
+            .exclude(pk=self.pk)
+            .count()
+            > 0
+        ):
+            raise ValidationError({
+                'snippet': _('Snippet file with this name already exists')
+            })
+
+        return super().validate_unique(exclude)
+
     snippet = models.FileField(
         upload_to=rename_snippet,
         verbose_name=_('Snippet'),
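Aside: a minimal standalone sketch (not part of the diff) of how the `snippet_path` helper introduced above resolves an uploaded filename; the example filenames are illustrative only, and paths are shown as on a POSIX filesystem.

```python
import os


def snippet_path(filename):
    """Stand-alone copy of the helper added above, for illustration."""
    return os.path.join('report', 'snippets', os.path.basename(str(filename)))


# Any directory component is stripped, so uniqueness can be validated against
# the final stored path rather than against the raw uploaded name.
print(snippet_path('my_snippet.html'))              # report/snippets/my_snippet.html
print(snippet_path('uploads/tmp/my_snippet.html'))  # report/snippets/my_snippet.html
```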
@@ -626,19 +658,20 @@ class ReportSnippet(models.Model):

 def rename_asset(instance, filename):
     """Function to rename an asset file when uploaded."""
-    filename = os.path.basename(filename)
-
-    path = os.path.join('report', 'assets', filename)
+    path = ReportAsset.asset_path(filename)
+    fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()

     # If the asset file is the *same* filename as the one being uploaded,
     # delete the original one from the media directory
     if str(filename) == str(instance.asset):
-        fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
-
         if fullpath.exists():
+            # Check for existing asset file with the same name
            logger.info("Deleting existing asset file: '%s'", filename)
            os.remove(fullpath)

+        # Ensure the cache is deleted for this asset
+        cache.delete(fullpath)
+
     return path

@@ -652,7 +685,35 @@ class ReportAsset(models.Model):

     def __str__(self):
         """String representation of a ReportAsset instance."""
-        return os.path.basename(self.asset.name)
+        return f'assets/{self.filename}'
+
+    @property
+    def filename(self):
+        """Return the filename of the asset."""
+        path = self.asset.name
+        if path:
+            return os.path.basename(path)
+        else:
+            return '-'
+
+    @staticmethod
+    def asset_path(filename):
+        """Return the fully-qualified asset path for the given filename."""
+        return os.path.join('report', 'assets', os.path.basename(str(filename)))
+
+    def validate_unique(self, exclude=None):
+        """Validate that this report asset is unique."""
+        proposed_path = self.asset_path(self.asset)
+
+        if (
+            ReportAsset.objects.filter(asset=proposed_path).exclude(pk=self.pk).count()
+            > 0
+        ):
+            raise ValidationError({
+                'asset': _('Asset file with this name already exists')
+            })
+
+        return super().validate_unique(exclude)

     # Asset file
     asset = models.FileField(
@@ -1,20 +1,13 @@
 """API serializers for the reporting models."""

+from rest_framework import serializers
+
+import report.models
 from InvenTree.serializers import (
     InvenTreeAttachmentSerializerField,
     InvenTreeModelSerializer,
 )

-from .models import (
-    BillOfMaterialsReport,
-    BuildReport,
-    PurchaseOrderReport,
-    ReturnOrderReport,
-    SalesOrderReport,
-    StockLocationReport,
-    TestReport,
-)
-

 class ReportSerializerBase(InvenTreeModelSerializer):
     """Base class for report serializer."""
@@ -24,7 +17,16 @@ class ReportSerializerBase(InvenTreeModelSerializer):
     @staticmethod
     def report_fields():
         """Generic serializer fields for a report template."""
-        return ['pk', 'name', 'description', 'template', 'filters', 'enabled']
+        return [
+            'pk',
+            'name',
+            'description',
+            'template',
+            'filters',
+            'page_size',
+            'landscape',
+            'enabled',
+        ]


 class TestReportSerializer(ReportSerializerBase):
@@ -33,7 +35,7 @@ class TestReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = TestReport
+        model = report.models.TestReport
         fields = ReportSerializerBase.report_fields()


@@ -43,7 +45,7 @@ class BuildReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = BuildReport
+        model = report.models.BuildReport
         fields = ReportSerializerBase.report_fields()


@@ -53,7 +55,7 @@ class BOMReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = BillOfMaterialsReport
+        model = report.models.BillOfMaterialsReport
         fields = ReportSerializerBase.report_fields()


@@ -63,7 +65,7 @@ class PurchaseOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = PurchaseOrderReport
+        model = report.models.PurchaseOrderReport
         fields = ReportSerializerBase.report_fields()


@@ -73,7 +75,7 @@ class SalesOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = SalesOrderReport
+        model = report.models.SalesOrderReport
         fields = ReportSerializerBase.report_fields()


@@ -83,7 +85,7 @@ class ReturnOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = ReturnOrderReport
+        model = report.models.ReturnOrderReport
         fields = ReportSerializerBase.report_fields()


@@ -93,5 +95,30 @@ class StockLocationReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""

-        model = StockLocationReport
+        model = report.models.StockLocationReport
         fields = ReportSerializerBase.report_fields()
+
+
+class ReportSnippetSerializer(InvenTreeModelSerializer):
+    """Serializer class for the ReportSnippet model."""
+
+    class Meta:
+        """Metaclass options."""
+
+        model = report.models.ReportSnippet
+
+        fields = ['pk', 'snippet', 'description']
+
+    snippet = InvenTreeAttachmentSerializerField()
+
+
+class ReportAssetSerializer(InvenTreeModelSerializer):
+    """Serializer class for the ReportAsset model."""
+
+    class Meta:
+        """Meta class options."""
+
+        model = report.models.ReportAsset
+        fields = ['pk', 'asset', 'description']
+
+    asset = InvenTreeAttachmentSerializerField()
@@ -11,7 +11,7 @@ margin-top: 4cm;
 {% endblock page_margin %}

 {% block bottom_left %}
-content: "v{{ report_revision }} - {{ date.isoformat }}";
+content: "v{{ report_revision }} - {% format_date date %}";
 {% endblock bottom_left %}

 {% block bottom_center %}
@@ -74,7 +74,7 @@ margin-top: 4cm;
 {% endblock style %}

 {% block bottom_left %}
-content: "v{{ report_revision }} - {{ date.isoformat }}";
+content: "v{{ report_revision }} - {% format_date date %}";
 {% endblock bottom_left %}

 {% block header_content %}
@@ -119,13 +119,13 @@ content: "v{{ report_revision }} - {{ date.isoformat }}";
     </tr>
     <tr>
         <th>{% trans "Issued" %}</th>
-        <td>{% render_date build.creation_date %}</td>
+        <td>{% format_date build.creation_date %}</td>
     </tr>
     <tr>
         <th>{% trans "Target Date" %}</th>
         <td>
         {% if build.target_date %}
-        {% render_date build.target_date %}
+        {% format_date build.target_date %}
         {% else %}
         <em>Not specified</em>
         {% endif %}
@@ -12,7 +12,7 @@ margin-top: 4cm;
 {% endblock page_margin %}

 {% block bottom_left %}
-content: "v{{ report_revision }} - {{ date.isoformat }}";
+content: "v{{ report_revision }} - {% format_date date %}";
 {% endblock bottom_left %}

 {% block bottom_center %}
@@ -11,7 +11,7 @@ margin-top: 4cm;
 {% endblock page_margin %}

 {% block bottom_left %}
-content: "v{{ report_revision }} - {{ date.isoformat }}";
+content: "v{{ report_revision }} - {% format_date date %}";
 {% endblock bottom_left %}

 {% block bottom_center %}
@@ -10,7 +10,7 @@
 }

 {% block bottom_left %}
-content: "{{ date.isoformat }}";
+content: "{% format_date date %}";
 {% endblock bottom_left %}

 {% block bottom_center %}
@@ -133,7 +133,7 @@ content: "{% trans 'Stock Item Test Report' %}";
     {% endif %}
     <td>{{ test_result.value }}</td>
     <td>{{ test_result.user.username }}</td>
-    <td>{{ test_result.date.date.isoformat }}</td>
+    <td>{% format_date test_result.date.date %}</td>
     {% else %}
     {% if test_template.required %}
     <td colspan='4' class='required-test-not-found'>{% trans "No result (required)" %}</td>
@@ -410,7 +410,10 @@ def format_number(number, **kwargs):
     except ValueError:
         pass

-    value = str(number)
+    # Re-encode, and normalize again
+    value = Decimal(number).normalize()
+    value = format(value, 'f')
+    value = str(value)

     leading = kwargs.get('leading', None)

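For context on the change above, a small standalone example (not part of the diff) of what `Decimal(...).normalize()` followed by `format(..., 'f')` produces: trailing zeros are stripped, and any scientific notation introduced by normalization is re-expanded into a plain decimal string.

```python
from decimal import Decimal

value = Decimal('1230.4500').normalize()
print(value, format(value, 'f'))   # 1230.45 1230.45

value = Decimal('12300').normalize()
print(value, format(value, 'f'))   # 1.23E+4 12300
```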
@@ -422,3 +425,37 @@ def format_number(number, **kwargs):
         pass

     return value
+
+
+@register.simple_tag
+def format_datetime(datetime, timezone=None, format=None):
+    """Format a datetime object for display.
+
+    Arguments:
+        datetime: The datetime object to format
+        timezone: The timezone to use for the date (defaults to the server timezone)
+        format: The format string to use (defaults to ISO formatting)
+    """
+    datetime = InvenTree.helpers.to_local_time(datetime, timezone)
+
+    if format:
+        return datetime.strftime(format)
+    else:
+        return datetime.isoformat()
+
+
+@register.simple_tag
+def format_date(date, timezone=None, format=None):
+    """Format a date object for display.
+
+    Arguments:
+        date: The date to format
+        timezone: The timezone to use for the date (defaults to the server timezone)
+        format: The format string to use (defaults to ISO formatting)
+    """
+    date = InvenTree.helpers.to_local_time(date, timezone).date()
+
+    if format:
+        return date.strftime(format)
+    else:
+        return date.isoformat()
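To illustrate the two tags defined above, a short sketch (not part of the diff). The import path is an assumption based on how the test module below refers to the tag library, and the result of the no-timezone call depends on the server's configured `TIME_ZONE`.

```python
from datetime import datetime, timezone as tz

# Assumed import path for the tag module; adjust to match the actual project layout.
from report.templatetags import report as report_tags

stamp = datetime(2024, 3, 13, 12, 30, tzinfo=tz.utc)

# Default behaviour: convert to the server timezone and return ISO 8601 output
print(report_tags.format_datetime(stamp))

# Explicit timezone and explicit strftime format
print(report_tags.format_datetime(stamp, timezone='Europe/Amsterdam', format='%Y-%m-%d %H:%M'))

# Date-only variant
print(report_tags.format_date(stamp, timezone='UTC'))  # 2024-03-13
```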
@@ -8,10 +8,12 @@ from pathlib import Path
 from django.conf import settings
 from django.core.cache import cache
 from django.http.response import StreamingHttpResponse
-from django.test import TestCase
+from django.test import TestCase, override_settings
 from django.urls import reverse
+from django.utils import timezone
 from django.utils.safestring import SafeString

+import pytz
 from PIL import Image

 import report.models as report_models
@@ -153,6 +155,37 @@ class ReportTagTest(TestCase):
         self.assertEqual(report_tags.multiply(2.3, 4), 9.2)
         self.assertEqual(report_tags.divide(100, 5), 20)

+    @override_settings(TIME_ZONE='America/New_York')
+    def test_date_tags(self):
+        """Test for date formatting tags.
+
+        - Source timezone is Australia/Sydney
+        - Server timezone is America/New York
+        """
+        time = timezone.datetime(
+            year=2024,
+            month=3,
+            day=13,
+            hour=12,
+            minute=30,
+            second=0,
+            tzinfo=pytz.timezone('Australia/Sydney'),
+        )
+
+        # Format a set of tests: timezone, format, expected
+        tests = [
+            (None, None, '2024-03-12T22:25:00-04:00'),
+            (None, '%d-%m-%y', '12-03-24'),
+            ('UTC', None, '2024-03-13T02:25:00+00:00'),
+            ('UTC', '%d-%B-%Y', '13-March-2024'),
+            ('Europe/Amsterdam', None, '2024-03-13T03:25:00+01:00'),
+            ('Europe/Amsterdam', '%y-%m-%d %H:%M', '24-03-13 03:25'),
+        ]
+
+        for tz, fmt, expected in tests:
+            result = report_tags.format_datetime(time, tz, fmt)
+            self.assertEqual(result, expected)
+

 class BarcodeTagTest(TestCase):
     """Unit tests for the barcode template tags."""
@@ -6,7 +6,7 @@ from decimal import Decimal

 from django.core.exceptions import ValidationError as DjangoValidationError
 from django.db import transaction
-from django.db.models import BooleanField, Case, Count, Q, Value, When
+from django.db.models import BooleanField, Case, Count, Prefetch, Q, Value, When
 from django.db.models.functions import Coalesce
 from django.utils.translation import gettext_lazy as _

@@ -20,6 +20,7 @@ import company.models
 import InvenTree.helpers
 import InvenTree.serializers
 import InvenTree.status_codes
+import part.filters as part_filters
 import part.models as part_models
 import stock.filters
 from company.serializers import SupplierPartSerializer
@@ -289,7 +290,14 @@ class StockItemSerializer(InvenTree.serializers.InvenTreeTagModelSerializer):
             'location',
             'sales_order',
             'purchase_order',
-            'part',
+            Prefetch(
+                'part',
+                queryset=part_models.Part.objects.annotate(
+                    category_default_location=part_filters.annotate_default_location(
+                        'category__'
+                    )
+                ).prefetch_related(None),
+            ),
             'part__category',
             'part__pricing_data',
             'supplier_part',
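The `Prefetch(...)` entry above replaces the plain `'part'` prefetch so the related parts can be annotated while they are fetched. A condensed sketch of the same Django pattern follows; it assumes an existing `queryset` and that the InvenTree modules are importable, and simply mirrors the lines above. Calling `.prefetch_related(None)` on the inner queryset clears any prefetch lookups already configured on it (documented Django behaviour), presumably so that only the lookups listed explicitly here are applied.

```python
from django.db.models import Prefetch

import part.filters as part_filters
import part.models as part_models

queryset = queryset.prefetch_related(
    Prefetch(
        'part',
        # Annotate each prefetched Part with its category default location
        queryset=part_models.Part.objects.annotate(
            category_default_location=part_filters.annotate_default_location('category__')
        ).prefetch_related(None),  # clear prefetches configured on the inner queryset
    ),
    'part__category',
    'part__pricing_data',
)
```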
@@ -886,6 +894,7 @@ class LocationSerializer(InvenTree.serializers.InvenTreeTagModelSerializer):
             'pathstring',
             'path',
             'items',
+            'sublocations',
             'owner',
             'icon',
             'custom_icon',
@@ -911,13 +920,18 @@ class LocationSerializer(InvenTree.serializers.InvenTreeTagModelSerializer):
     def annotate_queryset(queryset):
         """Annotate extra information to the queryset."""
         # Annotate the number of stock items which exist in this category (including subcategories)
-        queryset = queryset.annotate(items=stock.filters.annotate_location_items())
+        queryset = queryset.annotate(
+            items=stock.filters.annotate_location_items(),
+            sublocations=stock.filters.annotate_sub_locations(),
+        )

         return queryset

     url = serializers.CharField(source='get_absolute_url', read_only=True)

-    items = serializers.IntegerField(read_only=True)
+    items = serializers.IntegerField(read_only=True, label=_('Stock Items'))
+
+    sublocations = serializers.IntegerField(read_only=True, label=_('Sublocations'))

     level = serializers.IntegerField(read_only=True)

@@ -15,6 +15,7 @@
 {% include "InvenTree/settings/setting.html" with key="REPORT_ENABLE" icon="fa-file-pdf" %}
 {% include "InvenTree/settings/setting.html" with key="REPORT_DEFAULT_PAGE_SIZE" icon="fa-print" %}
 {% include "InvenTree/settings/setting.html" with key="REPORT_DEBUG_MODE" icon="fa-laptop-code" %}
+{% include "InvenTree/settings/setting.html" with key="REPORT_LOG_ERRORS" icon="fa-exclamation-circle" %}
 {% include "InvenTree/settings/setting.html" with key="REPORT_ENABLE_TEST_REPORT" icon="fa-vial" %}
 {% include "InvenTree/settings/setting.html" with key="REPORT_ATTACH_TEST_REPORT" icon="fa-file-upload" %}
 </tbody>
@@ -1172,12 +1172,18 @@ function loadBomTable(table, options={}) {

        var available_stock = availableQuantity(row);

+       var external_stock = row.external_stock ?? 0;
+
        var text = renderLink(`${available_stock}`, url);

        if (row.sub_part_detail && row.sub_part_detail.units) {
            text += ` <small>${row.sub_part_detail.units}</small>`;
        }

+       if (external_stock > 0) {
+           text += makeIconBadge('fa-sitemap', `{% trans "External stock" %}: ${external_stock}`);
+       }
+
        if (available_stock <= 0) {
            text += makeIconBadge('fa-times-circle icon-red', '{% trans "No Stock Available" %}');
        } else {
@@ -2618,6 +2618,10 @@ function loadBuildLineTable(table, build_id, options={}) {
                icons += makeIconBadge('fa-tools icon-blue', `{% trans "In Production" %}: ${formatDecimal(row.in_production)}`);
            }

+           if (row.external_stock > 0) {
+               icons += makeIconBadge('fa-sitemap', `{% trans "External stock" %}: ${row.external_stock}`);
+           }
+
            return renderLink(text, url) + icons;
        }
    },
@@ -2730,6 +2734,7 @@ function loadBuildLineTable(table, build_id, options={}) {

            allocateStockToBuild(build_id, [row], {
                output: options.output,
+               source_location: options.location,
                success: function() {
                    $(table).bootstrapTable('refresh');
                }
@@ -2804,6 +2804,15 @@ function loadPartCategoryTable(table, options) {
            title: '{% trans "Parts" %}',
            switchable: true,
            sortable: true,
+       },
+       {
+           field: 'structural',
+           title: '{% trans "Structural" %}',
+           switchable: true,
+           sortable: true,
+           formatter: function(value) {
+               return yesNoLabel(value);
+           }
        }
    ]
});
@@ -717,7 +717,10 @@ def check_user_role(user, role, permission):
     # First, check the cache
     key = f'role_{user}_{role}_{permission}'

-    result = cache.get(key)
+    try:
+        result = cache.get(key)
+    except Exception:
+        result = None

     if result is not None:
         return result
@@ -745,7 +748,11 @@ def check_user_role(user, role, permission):
             break

     # Save result to cache
-    cache.set(key, result, timeout=3600)
+    try:
+        cache.set(key, result, timeout=3600)
+    except Exception:
+        pass

     return result

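The change above wraps both cache operations so that a broken or unreachable cache backend degrades to a recomputation rather than an error. A generic standalone sketch of that fail-soft pattern (function and argument names are illustrative, not InvenTree API):

```python
def get_cached_or_compute(cache, key, compute, timeout=3600):
    """Return a cached value if possible, recomputing and re-caching on any cache failure."""
    try:
        result = cache.get(key)
    except Exception:
        # Cache backend unavailable (for example an external Redis that is down);
        # fall through and recompute the value
        result = None

    if result is None:
        result = compute()

        try:
            cache.set(key, result, timeout=timeout)
        except Exception:
            # Failing to write the cache should never break the caller
            pass

    return result
```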
@@ -183,7 +183,7 @@ function create_initscripts() {
     ${INIT_CMD} stop nginx
     echo "# Setting up nginx to ${SETUP_NGINX_FILE}"
     # Always use the latest nginx config; important if new headers are added / needed for security
-    cp ${APP_HOME}/docker/production/nginx.prod.conf ${SETUP_NGINX_FILE}
+    cp ${APP_HOME}/contrib/packager.io/nginx.prod.conf ${SETUP_NGINX_FILE}
     sed -i s/inventree-server:8000/localhost:6000/g ${SETUP_NGINX_FILE}
     sed -i s=var/www=opt/inventree/data=g ${SETUP_NGINX_FILE}
     # Start nginx
contrib/packager.io/nginx.prod.conf (new file, +67)
@@ -0,0 +1,67 @@
+
+server {
+
+    # Listen for connection on (internal) port 80
+    # If you are exposing this server to the internet, you should use HTTPS!
+    # In which case, you should also set up a redirect from HTTP to HTTPS, and listen on port 443
+    # See the Nginx documentation for more details
+    listen 80;
+
+    real_ip_header proxy_protocol;
+
+    location / {
+
+        proxy_set_header Host $http_host;
+        proxy_set_header X-Forwarded-By $server_addr:$server_port;
+        proxy_set_header X-Forwarded-For $remote_addr;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header CLIENT_IP $remote_addr;
+
+        proxy_pass_request_headers on;
+
+        proxy_redirect off;
+
+        client_max_body_size 100M;
+
+        proxy_buffering off;
+        proxy_request_buffering off;
+
+        # Do not touch this unless you have a specific reason - this and the docker-compose need to match
+        proxy_pass http://inventree-server:8000;
+    }
+
+    # Redirect any requests for static files
+    location /static/ {
+        alias /var/www/static/;
+        autoindex on;
+
+        # Caching settings
+        expires 30d;
+        add_header Pragma public;
+        add_header Cache-Control "public";
+    }
+
+    # Redirect any requests for media files
+    location /media/ {
+        alias /var/www/media/;
+
+        # Media files require user authentication
+        auth_request /auth;
+
+        # Content header to force download
+        add_header Content-disposition "attachment";
+    }
+
+    # Use the 'user' API endpoint for auth
+    location /auth {
+        internal;
+
+        proxy_pass http://inventree-server:8000/auth/;
+
+        proxy_pass_request_body off;
+        proxy_set_header Content-Length "";
+        proxy_set_header X-Original-URI $request_uri;
+    }
+
+}
@@ -43,7 +43,7 @@ INVENTREE_AUTO_UPDATE=True
 # Image tag that should be used
 INVENTREE_TAG=stable

-# Site URL - update this to match your host (and update the Caddyfile too!)
+# Site URL - update this to match your host
 INVENTREE_SITE_URL="http://inventree.localhost"

 COMPOSE_PROJECT_NAME=inventree
@@ -5,7 +5,20 @@

 (log_common) {
     log {
-        output file /var/log/caddy/{args.0}.access.log
+        output file /var/log/caddy/{args[0]}.access.log
+    }
+}
+
+(cors-headers) {
+    header Allow GET,HEAD,OPTIONS
+    header Access-Control-Allow-Origin *
+    header Access-Control-Allow-Methods GET,HEAD,OPTIONS
+    header Access-Control-Allow-Headers Authorization,Content-Type,User-Agent
+
+    @cors_preflight{args[0]} method OPTIONS
+
+    handle @cors_preflight{args[0]} {
+        respond "" 204
     }
 }

@@ -20,16 +33,21 @@
 }

 handle_path /static/* {
+    import cors-headers static
+
     root * /var/www/static
     file_server
 }

 handle_path /media/* {
+    import cors-headers media
+
+    root * /var/www/media
+    file_server
+
     forward_auth {$INVENTREE_SERVER:"http://inventree-server:8000"} {
         uri /auth/
     }
-    root * /var/www/media
-    file_server
 }

 reverse_proxy {$INVENTREE_SERVER:"http://inventree-server:8000"}
@@ -4,7 +4,7 @@ version: "3.8"
 # - PostgreSQL as the database backend
 # - gunicorn as the InvenTree web server
 # - django-q as the InvenTree background worker process
-# - nginx as a reverse proxy
+# - Caddy as a reverse proxy
 # - redis as the cache manager (optional, disabled by default)

 # ---------------------
@@ -4,6 +4,7 @@
 # Note that for postgreslql, we use the 13 version, which matches the version used in the InvenTree docker image

 apk add gcc g++ musl-dev openssl-dev libffi-dev cargo python3-dev openldap-dev \
+    libstdc++ build-base linux-headers py3-grpcio \
     jpeg-dev openjpeg-dev libwebp-dev zlib-dev \
     sqlite sqlite-dev \
     mariadb-connector-c-dev mariadb-client mariadb-dev \
docs/.gitignore (vendored, +3)
@@ -10,6 +10,9 @@ env/
 _build/
 site/

+# Generated API schema files
+docs/api/schema/*.yml
+
 # Temp files
 releases.json
 versions.json
@@ -17,6 +17,17 @@ The API is self-documenting, and the documentation is provided alongside any Inv
 {% include 'img.html' %}
 {% endwith %}

+### Schema Description
+The API schema is also documented in the [API Schema](./schema.md) page.
+
+### Generating Schema File
+
+If you want to generate the API schema file yourself (for example, to use with an external client), use the `invoke schema` command. Run with the `-help` flag to see available options.
+
+```
+invoke schema -help
+```
+
 ## Authentication

 Users must be authenticated to gain access to the InvenTree API. The API accepts either basic username:password authentication, or token authentication. Token authentication is recommended as it provides much faster API access.
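As a follow-on (not part of the commit), one way to poke at a generated schema file from Python. The `schema.yml` filename is a placeholder, so check the `invoke schema -help` output for the actual output location; the snippet requires the PyYAML package.

```python
import yaml  # PyYAML

# 'schema.yml' is a placeholder name for the generated OpenAPI schema file
with open('schema.yml') as f:
    schema = yaml.safe_load(f)

# Standard OpenAPI structure: top-level 'info' and 'paths' sections
print(schema['info']['title'], schema['info']['version'])
print(f"{len(schema['paths'])} documented paths")
```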
docs/docs/api/schema.md (new file, +40)
@@ -0,0 +1,40 @@
+---
+title: InvenTree API Schema
+---
+
+The InvenTree API is implemented using the [Django REST framework](https://www.django-rest-framework.org).
+The API schema as documented below is generated using the [drf-spectacular](https://github.com/tfranzel/drf-spectacular/) extension.
+
+## API Version
+
+This documentation is for API version: `171`
+
+!!! tip "API Schema History"
+    We track API schema changes, and provide a snapshot of each API schema version in the [API schema repository](https://github.com/inventree/schema/).
+
+## API Schema File
+
+The API schema file is available for download, and can be used for generating client libraries, or for testing API endpoints.
+
+## API Schema Documentation
+
+API schema documentation is split into the following categories:
+
+| Category | Description |
+| --- | --- |
+| [Authorization and Authentication](./schema/auth.md) | Authorization and Authentication |
+| [Background Task Management](./schema/background-task.md) | Background Task Management |
+| [Barcode Scanning](./schema/barcode.md) | Barcode Scanning |
+| [Bill of Materials](./schema/bom.md) | Bill of Materials |
+| [Build Order Management](./schema/build.md) | Build Order Management |
+| [Company Management](./schema/company.md) | Company Management |
+| [Label Printing](./schema/label.md) | Label Printing |
+| [External Machine Management](./schema/machine.md) | External Machine Management |
+| [External Order Management](./schema/order.md) | External Order Management |
+| [Parts and Part Categories](./schema/part.md) | Parts and Part Categories |
+| [Plugin Functionality](./schema/plugins.md) | Plugin Functionality |
+| [Report Generation](./schema/report.md) | Report Generation |
+| [Settings Management](./schema/settings.md) | Settings Management |
+| [Stock and Stock Locations](./schema/stock.md) | Stock and Stock Locations |
+| [User Management](./schema/user.md) | User Management |
+| [General](./schema/general.md) | General API endpoints |
docs/docs/api/schema/auth.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Authorization and Authentication
+---
+
+The *Authorization and Authentication* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/auth.yml)]

docs/docs/api/schema/background-task.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Background Task Management
+---
+
+The *Background Task Management* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/background-task.yml)]

docs/docs/api/schema/barcode.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Barcode Scanning
+---
+
+The *Barcode Scanning* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/barcode.yml)]

docs/docs/api/schema/bom.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Bill of Materials
+---
+
+The *Bill of Materials* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/bom.yml)]

docs/docs/api/schema/build.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Build Order Management
+---
+
+The *Build Order Management* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/build.yml)]

docs/docs/api/schema/company.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Company Management
+---
+
+The *Company Management* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/company.yml)]

docs/docs/api/schema/general.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: General API Endpoints
+---
+
+The *General API Endpoints* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/general.yml)]

docs/docs/api/schema/label.md (new file, +7)
@@ -0,0 +1,7 @@
+---
+title: Label Printing
+---
+
+The *Label Printing* section of the InvenTree API schema is documented below.
+
+[OAD(./docs/docs/api/schema/label.yml)]
Some files were not shown because too many files have changed in this diff.