Merge branch 'master' into add-changelog

commit ad4840cf73
Matthias Mair, 2024-08-09 16:21:44 +02:00, committed by GitHub
GPG Key ID: B5690EEEBB952194
303 changed files with 153543 additions and 125580 deletions


@@ -1,8 +1,6 @@
-version: "3"
 services:
   db:
-    image: postgres:14
+    image: postgres:13
     restart: unless-stopped
     expose:
       - 5432/tcp

.github/ISSUE_TEMPLATE/config.yml (new file)

@@ -0,0 +1 @@
+blank_issues_enabled: false


@@ -18,7 +18,7 @@ updates:
     directories:
       - /contrib/container
       - /docs
-      - /.github
+      - /contrib/dev_reqs
       - /src/backend
     schedule:
       interval: weekly


@@ -18,8 +18,11 @@ from pathlib import Path
 import requests
+REPO = os.getenv('GITHUB_REPOSITORY', 'inventree/inventree')
+GITHUB_API_URL = os.getenv('GITHUB_API_URL', 'https://api.github.com')
-def get_existing_release_tags():
+def get_existing_release_tags(include_prerelease=True):
     """Request information on existing releases via the GitHub API."""
     # Check for github token
     token = os.getenv('GITHUB_TOKEN', None)
@@ -28,9 +31,7 @@ def get_existing_release_tags():
     if token:
         headers = {'Authorization': f'Bearer {token}'}
-    response = requests.get(
-        'https://api.github.com/repos/inventree/inventree/releases', headers=headers
-    )
+    response = requests.get(f'{GITHUB_API_URL}/repos/{REPO}/releases', headers=headers)
     if response.status_code != 200:
         raise ValueError(
@@ -50,6 +51,9 @@ def get_existing_release_tags():
             print(f"Version '{tag}' did not match expected pattern")
             continue
+        if not include_prerelease and release['prerelease']:
+            continue
         tags.append([int(x) for x in match.groups()])
     return tags
@@ -73,7 +77,7 @@ def check_version_number(version_string, allow_duplicate=False):
     version_tuple = [int(x) for x in match.groups()]
     # Look through the existing releases
-    existing = get_existing_release_tags()
+    existing = get_existing_release_tags(include_prerelease=False)
     # Assume that this is the highest release, unless told otherwise
     highest_release = True
@@ -90,6 +94,11 @@ def check_version_number(version_string, allow_duplicate=False):
 if __name__ == '__main__':
+    # Ensure that we are running in GH Actions
+    if os.environ.get('GITHUB_ACTIONS', '') != 'true':
+        print('This script is intended to be run within a GitHub Action!')
+        sys.exit(1)
     if 'only_version' in sys.argv:
         here = Path(__file__).parent.absolute()
         version_file = here.joinpath(
@@ -124,14 +133,13 @@ if __name__ == '__main__':
             env_file.write(f'tag={tag}\n')
             env_file.write(f'new_tag={new_tag}\n')
         exit(0)
     # GITHUB_REF_TYPE may be either 'branch' or 'tag'
     GITHUB_REF_TYPE = os.environ['GITHUB_REF_TYPE']
     # GITHUB_REF may be either 'refs/heads/<branch>' or 'refs/heads/<tag>'
     GITHUB_REF = os.environ['GITHUB_REF']
     GITHUB_REF_NAME = os.environ['GITHUB_REF_NAME']
     GITHUB_BASE_REF = os.environ['GITHUB_BASE_REF']
     # Print out version information, makes debugging actions *much* easier!
@@ -215,7 +223,7 @@ if __name__ == '__main__':
     # Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
     with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
         # Construct tag string
-        tags = ','.join([f'inventree/inventree:{tag}' for tag in docker_tags])
+        tags = ','.join([f'{REPO.lower()}:{tag}' for tag in docker_tags])
         env_file.write(f'docker_tags={tags}\n')


@@ -124,10 +124,10 @@ jobs:
          rm -rf InvenTree/_testfolder
      - name: Set up QEMU
        if: github.event_name != 'pull_request'
-        uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee # pin@v3.1.0
+        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # pin@v3.2.0
      - name: Set up Docker Buildx
        if: github.event_name != 'pull_request'
-        uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4 # pin@v3.4.0
+        uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27 # pin@v3.5.0
      - name: Set up cosign
        if: github.event_name != 'pull_request'
        uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # pin@v3.5.0
@@ -141,14 +141,14 @@ jobs:
          fi
      - name: Login to Dockerhub
        if: github.event_name != 'pull_request' && steps.docker_login.outputs.skip_dockerhub_login != 'true'
-        uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # pin@v3.2.0
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # pin@v3.3.0
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log into registry ghcr.io
        if: github.event_name != 'pull_request'
-        uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # pin@v3.2.0
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # pin@v3.3.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -166,7 +166,7 @@ jobs:
      - name: Push Docker Images
        id: push-docker
        if: github.event_name != 'pull_request'
-        uses: docker/build-push-action@a254f8ca60a858f3136a2f1f23a60969f2c402dd # pin@v6.4.0
+        uses: docker/build-push-action@5176d81f87c23d6fc96624dfdbcd9f3830bbe445 # pin@v6.5.0
        with:
          context: .
          file: ./contrib/container/Dockerfile


@@ -177,7 +177,7 @@ jobs:
          echo "Downloaded api.yaml"
      - name: Running OpenAPI Spec diff action
        id: breaking_changes
-        uses: oasdiff/oasdiff-action/diff@main
+        uses: oasdiff/oasdiff-action/diff@205ce7e2c5ae1511e720cbd307cae79fd7d4a909 # pin@main
        with:
          base: 'api.yaml'
          revision: 'src/backend/InvenTree/schema.yml'


@@ -1,13 +1,16 @@
 # Runs on releases
-name: Publish release notes
+name: Publish release
 on:
   release:
     types: [published]
+permissions:
+  contents: read
 jobs:
   stable:
     runs-on: ubuntu-latest
+    name: Write release to stable branch
     permissions:
       contents: write
       pull-requests: write
@@ -28,11 +31,13 @@ jobs:
          branch: stable
          force: true
-  publish-build:
+  build:
    runs-on: ubuntu-latest
+    name: Build and attest frontend
    permissions:
+      id-token: write
      contents: write
-      pull-requests: write
+      attestations: write
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
      - name: Environment Setup
@@ -43,6 +48,11 @@ jobs:
        run: cd src/frontend && yarn install
      - name: Build frontend
        run: cd src/frontend && npm run compile && npm run build
+      - name: Create SBOM for frontend
+        uses: anchore/sbom-action@v0
+        with:
+          artifact-name: frontend-build.spdx
+          path: src/frontend
      - name: Write version file - SHA
        run: cd src/backend/InvenTree/web/static/web/.vite && echo "$GITHUB_SHA" > sha.txt
      - name: Write version file - TAG
@@ -51,10 +61,25 @@ jobs:
        run: |
          cd src/backend/InvenTree/web/static/web
          zip -r ../frontend-build.zip * .vite
-      - uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
+      - name: Attest Build Provenance
+        id: attest
+        uses: actions/attest-build-provenance@v1
+        with:
+          subject-path: "${{ github.workspace }}/src/backend/InvenTree/web/static/frontend-build.zip"
+      - name: Upload frontend
+        uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: src/backend/InvenTree/web/static/frontend-build.zip
          asset_name: frontend-build.zip
          tag: ${{ github.ref }}
          overwrite: true
+      - name: Upload Attestation
+        uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
+        with:
+          repo_token: ${{ secrets.GITHUB_TOKEN }}
+          asset_name: frontend-build.intoto.jsonl
+          file: ${{ steps.attest.outputs.bundle-path}}
+          tag: ${{ github.ref }}
+          overwrite: true


@@ -67,6 +67,6 @@ jobs:
      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@4fa2a7953630fd2f3fb380f21be14ede0169dd4f # v3.25.12
+        uses: github/codeql-action/upload-sarif@2d790406f505036ef40ecba973cc774a50395aac # v3.25.13
        with:
          sarif_file: results.sarif


@@ -35,7 +35,7 @@ dependencies:
   - gettext
   - nginx
   - jq
-  - libffi7
+  - "libffi7 | libffi8"
 targets:
   ubuntu-20.04: true
   debian-11: true

.vscode/launch.json

@@ -6,19 +6,37 @@
   "configurations": [
     {
       "name": "InvenTree Server",
-      "type": "python",
+      "type": "debugpy",
       "request": "launch",
       "program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
-      "args": ["runserver"],
+      "args": [
+        "runserver",
+        // "0.0.0.0:8000", // expose server in network (useful for testing with mobile app)
+        // "--noreload" // disable auto-reload
+      ],
+      "django": true,
+      "justMyCode": true
+    },
+    {
+      "name": "InvenTree Server - Tests",
+      "type": "debugpy",
+      "request": "launch",
+      "program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
+      "args": [
+        "test",
+        // "part.test_api.PartCategoryAPITest", // run only a specific test
+      ],
       "django": true,
       "justMyCode": true
     },
     {
       "name": "InvenTree Server - 3rd party",
-      "type": "python",
+      "type": "debugpy",
       "request": "launch",
       "program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
-      "args": ["runserver"],
+      "args": [
+        "runserver"
+      ],
       "django": true,
       "justMyCode": false
     },


@@ -9,7 +9,7 @@
 # - Runs InvenTree web server under django development server
 # - Monitors source files for any changes, and live-reloads server
-ARG base_image=python:3.11-alpine3.20
+ARG base_image=python:3.11-alpine3.18
 FROM ${base_image} AS inventree_base
 # Build arguments for this image
@@ -64,7 +64,7 @@ RUN apk add --no-cache \
     # Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
     py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
     # Postgres client
-    postgresql14-client \
+    postgresql13-client \
     # MySQL / MariaDB client
     mariadb-client mariadb-connector-c \
     && \


@@ -1,5 +1,3 @@
-version: "3.8"
 # Docker compose recipe for InvenTree development server
 # - Runs PostgreSQL as the database backend
 # - Uses built-in django webserver
@@ -20,7 +18,7 @@ services:
     # Use PostgreSQL as the database backend
     # Note: This can be changed to a different backend if required
     inventree-dev-db:
-        image: postgres:14
+        image: postgres:13
         expose:
             - 5432/tcp
         environment:


@@ -1,5 +1,3 @@
-version: "3.8"
 # Docker compose recipe for a production-ready InvenTree setup, with the following containers:
 # - PostgreSQL as the database backend
 # - gunicorn as the InvenTree web server
@@ -38,7 +36,7 @@ services:
     # Database service
     # Use PostgreSQL as the database backend
     inventree-db:
-        image: postgres:14
+        image: postgres:13
         container_name: inventree-db
         expose:
             - ${INVENTREE_DB_PORT:-5432}/tcp


@@ -1,12 +1,12 @@
 #!/bin/ash
 # Install system packages required for building InvenTree python libraries
-# Note that for postgreslql, we use the 14 version, which matches the version used in the InvenTree docker image
+# Note that for postgreslql, we use the version 13, which matches the version used in the InvenTree docker image
 apk add gcc g++ musl-dev openssl-dev libffi-dev cargo python3-dev openldap-dev \
     libstdc++ build-base linux-headers py3-grpcio \
     jpeg-dev openjpeg-dev libwebp-dev zlib-dev \
     sqlite sqlite-dev \
     mariadb-connector-c-dev mariadb-client mariadb-dev \
-    postgresql14-dev postgresql-libs \
+    postgresql13-dev postgresql-libs \
     $@


@@ -4,9 +4,9 @@ asgiref==3.8.1 \
     --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \
     --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590
     # via django
-django==4.2.14 \
-    --hash=sha256:3ec32bc2c616ab02834b9cac93143a7dc1cdcd5b822d78ac95fc20a38c534240 \
-    --hash=sha256:fc6919875a6226c7ffcae1a7d51e0f2ceaf6f160393180818f6c95f51b1e7b96
+django==4.2.15 \
+    --hash=sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30 \
+    --hash=sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a
     # via django-auth-ldap
 django-auth-ldap==4.8.0 \
     --hash=sha256:4b4b944f3c28bce362f33fb6e8db68429ed8fd8f12f0c0c4b1a4344a7ef225ce \


@@ -1,4 +1,4 @@
 # Packages needed for CI/packages
-requests==2.32.2
+requests==2.32.3
 pyyaml==6.0.1
-jc==1.25.2
+jc==1.25.3


@@ -100,9 +100,9 @@ idna==3.7 \
     --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
     --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
     # via requests
-jc==1.25.2 \
-    --hash=sha256:26e412a65a478f9da3097653db6277f915cfae5c0f0a3f42026b405936abd358 \
-    --hash=sha256:97ada193495f79550f06fe0cbfb119ff470bcca57c1cc593a5cdb0008720e0b3
+jc==1.25.3 \
+    --hash=sha256:ea17a8578497f2da92f73924d9d403f4563ba59422fbceff7bb4a16cdf84a54f \
+    --hash=sha256:fa3140ceda6cba1210d1362f363cd79a0514741e8a1dd6167db2b2e2d5f24f7b
     # via -r contrib/dev_reqs/requirements.in
 pygments==2.17.2 \
     --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \
@@ -161,9 +161,9 @@ pyyaml==6.0.1 \
     --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
     --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
     # via -r contrib/dev_reqs/requirements.in
-requests==2.32.2 \
-    --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \
-    --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c
+requests==2.32.3 \
+    --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+    --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
     # via -r contrib/dev_reqs/requirements.in
 ruamel-yaml==0.18.6 \
     --hash=sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636 \


@@ -75,6 +75,7 @@ root_command() {
         ;;
     "Debian GNU/Linux" | "debian gnu/linux" | Raspbian)
         if [[ $VER == "12" ]]; then
+            DIST_VER="11"
             SUPPORTED=true
         elif [[ $VER == "11" ]]; then
             SUPPORTED=true

(Five new binary image files added, not shown; sizes: 48 KiB, 45 KiB, 54 KiB, 58 KiB, 32 KiB.)


@@ -66,10 +66,11 @@ Each *Build Order* has an associated *Status* flag, which indicates the state of
 | Status | Description |
 | ----------- | ----------- |
-| `Pending` | Build has been created and build is ready for subpart allocation |
-| `Production` | One or more build outputs have been created for this build |
-| `Cancelled` | Build has been cancelled |
-| `Completed` | Build has been completed |
+| `Pending` | Build order has been created, but is not yet in production |
+| `Production` | Build order is currently in production |
+| `On Hold` | Build order has been placed on hold, but is still active |
+| `Cancelled` | Build order has been cancelled |
+| `Completed` | Build order has been completed |
 **Source Code**
@@ -103,25 +104,44 @@ For further information, refer to the [stock allocation documentation](./allocat
 ## Build Order Display
-The detail view for a single build order provides multiple display tabs, as follows:
+The detail view for a single build order provides multiple display panels, as follows:
 ### Build Details
-The *Build Details* tab provides an overview of the Build Order:
+The *Build Details* panel provides an overview of the Build Order:
-{% with id="build_details", url="build/build_details.png", description="Details tab" %}
+{% with id="build_details", url="build/build_panel_details.png", description="Build details panel" %}
 {% include "img.html" %}
 {% endwith %}
 ### Line Items
-The *Line Items* tab provides an interface to allocate required stock (as specified by the BOM) to the build:
+The *Line Items* panel displays all the line items (as defined by the [bill of materials](./bom.md)) required to complete the build order.
-{% with id="build_allocate", url="build/build_allocate.png", description="Allocation tab" %}
+{% with id="build_allocate", url="build/build_panel_line_items.png", description="Build line items panel" %}
 {% include "img.html" %}
 {% endwith %}
-The allocation table (as shown above) shows the stock allocation progress for this build. In the example above, there are two BOM lines, which have been partially allocated.
+The allocation table (as shown above) provides an interface to allocate required stock, and also shows the stock allocation progress for each line item in the build.
### Incomplete Outputs
The *Incomplete Outputs* panel shows the list of in-progress [build outputs](./output.md) (created stock items) associated with this build.
{% with id="build_outputs", url="build/build_outputs.png", description="Outputs tab" %}
{% include "img.html" %}
{% endwith %}
!!! info "Example: Build Outputs"
In the example image above, a single output (serial number 2) has been completed, while serial numbers 1 and 4 are still in progress.
- Build outputs can be created from this screen, by selecting the *Create New Output* button
- Outputs which are "in progress" can be completed or cancelled
- Completed outputs (which are simply *stock items*) can be viewed in the stock table at the bottom of the screen
### Completed Outputs
This panel displays all the completed build outputs (stock items) which have been created by this build order:
### Allocated Stock ### Allocated Stock
@@ -138,28 +158,29 @@ The *Consumed Stock* tab displays all stock items which have been *consumed* by
 - [Tracked stock items](./allocate.md#tracked-stock) are consumed by specific build outputs
 - [Untracked stock items](./allocate.md#untracked-stock) are consumed by the build order
-### Build Outputs
-The *Build Outputs* tab shows the [build outputs](./output.md) (created stock items) associated with this build.
-As shown below, there are separate panels for *incomplete* and *completed* build outputs.
-{% with id="build_outputs", url="build/build_outputs.png", description="Outputs tab" %}
-{% include "img.html" %}
-{% endwith %}
-!!! info "Example: Build Outputs"
-    In the example image above, a single output (serial number 2) has been completed, while serial numbers 1 and 4 are still in progress.
-- Build outputs can be created from this screen, by selecting the *Create New Output* button
-- Outputs which are "in progress" can be completed or cancelled
-- Completed outputs (which are simply *stock items*) can be viewed in the stock table at the bottom of the screen
 ### Child Builds
 If there exist any build orders which are *children* of the selected build order, they are displayed in the *Child Builds* tab:
-{% with id="build_childs", url="build/build_childs.png", description="Child builds tab" %}
+{% with id="build_childs", url="build/build_childs.png", description="Child builds panel" %}
{% include "img.html" %}
{% endwith %}
### Test Results
For *trackable* parts, test results can be recorded against each build output. These results are displayed in the *Test Results* panel:
{% with id="build_test_results", url="build/build_panel_test_results.png", description="Test Results panel" %}
{% include "img.html" %}
{% endwith %}
This table provides a summary of the test results for each build output, and allows test results to be quickly added for each build output.
### Test Statistics
For *trackable* parts, this panel displays a summary of the test results for all build outputs:
{% with id="build_test_stats", url="build/build_panel_test_statistics.png", description="Test Statistics panel" %}
{% include "img.html" %}
{% endwith %}


@@ -4,4 +4,22 @@ title: InvenTree Demo
 ## InvenTree Demo
-This page has moved to [https://inventree.org/demo.html](https://inventree.org/demo.html)
+If you are interested in trying out InvenTree, you can access the InvenTree demo instance at [https://demo.inventree.org](https://demo.inventree.org).
This page is populated with a sample dataset, which is reset every 24 hours.
You can read more about the InvenTree demo here: [https://inventree.org/demo.html](https://inventree.org/demo.html)
### User Accounts
The demo instance has a number of user accounts which you can use to explore the system:
| Username | Password | Staff Access | Enabled | Description |
| -------- | -------- | ------------ | ------- | ----------- |
| allaccess | nolimits | No | Yes | View / create / edit all pages and items |
| reader | readonly | No | Yes | Can view all pages but cannot create, edit or delete database records |
| engineer | partsonly | No | Yes | Can manage parts, view stock, but no access to purchase orders or sales orders |
| steven | wizardstaff | Yes | Yes | Staff account, can access some admin sections |
| ian | inactive | No | No | Inactive account, cannot log in |
| susan | inactive | No | No | Inactive account, cannot log in |
| admin | inventree | Yes | Yes | Superuser account, can access all parts of the system |


@@ -0,0 +1,19 @@
---
title: Icon Pack Mixin
---
## IconPackMixin
The IconPackMixin class provides basic functionality for letting plugins expose custom icon packs that are available in the InvenTree UI. This is especially useful to provide a custom crafted icon pack with icons for different location types, e.g. different sizes and styles of drawers, bags, ESD bags, ... which are not available in the standard tabler icons library.
### Sample Plugin
The following example demonstrates how to use the `IconPackMixin` class to add a custom icon pack:
::: plugin.samples.icons.icon_sample.SampleIconPlugin
options:
show_bases: False
show_root_heading: False
show_root_toc_entry: False
show_source: True
members: []


@@ -20,6 +20,7 @@ Each Purchase Order has a specific status code which indicates the current state
 | --- | --- |
 | Pending | The purchase order has been created, but has not been submitted to the supplier |
 | In Progress | The purchase order has been issued to the supplier, and is in progress |
+| On Hold | The purchase order has been placed on hold, but is still active |
 | Complete | The purchase order has been completed, and is now closed |
 | Cancelled | The purchase order was cancelled, and is now closed |
 | Lost | The purchase order was lost, and is now closed |


@@ -45,6 +45,7 @@ Each Return Order has a specific status code, as follows:
 | --- | --- |
 | Pending | The return order has been created, but not sent to the customer |
 | In Progress | The return order has been issued to the customer |
+| On Hold | The return order has been placed on hold, but is still active |
 | Complete | The return order was marked as complete, and is now closed |
 | Cancelled | The return order was cancelled, and is now closed |


@@ -20,6 +20,7 @@ Each Sales Order has a specific status code, which represents the state of the o
 | --- | --- |
 | Pending | The sales order has been created, but has not been finalized or submitted |
 | In Progress | The sales order has been issued, and is in progress |
+| On Hold | The sales order has been placed on hold, but is still active |
 | Shipped | The sales order has been shipped, but is not yet complete |
 | Complete | The sales order is fully completed, and is now closed |
 | Cancelled | The sales order was cancelled, and is now closed |


@@ -259,6 +259,31 @@ A shortcut function is provided for rendering an image associated with a Company
 *Preview* and *thumbnail* image variations can be rendered for the `company_image` tag, in a similar manner to [part image variations](#image-variations)
## Icons
Some models (e.g. part categories and locations) allow a custom icon to be specified. To render these icons in a report, the report template library provides a `{% raw %}{% icon location.icon %}{% endraw %}` template tag.
This tag renders the required html for the icon.
!!! info "Loading fonts"
Additionally the icon fonts need to be loaded into the template. This can be done using the `{% raw %}{% include_icon_fonts %}{% endraw %}` template tag inside of a style block
!!! tip "Custom classes for styling the icon further"
The icon template tag accepts an optional `class` argument, which can be used to apply a custom class to the rendered icon to style it further, e.g. positioning it, changing its size, ... `{% raw %}{% icon location.icon class="my-class" %}{% endraw %}`.
```html
{% raw %}
{% load report %}
{% block style %}
{% include_icon_fonts %}
{% endblock style %}
{% icon location.icon %}
{% endraw %}
```
 ## InvenTree Logo
 A template tag is provided to load the InvenTree logo image into a report. You can render the logo using the `{% raw %}{% logo_image %}{% endraw %}` tag:


@@ -54,6 +54,8 @@ The following basic options are available:
 | --- | --- | --- | --- |
 | INVENTREE_SITE_URL | site_url | Specify a fixed site URL | *Not specified* |
 | INVENTREE_DEBUG | debug | Enable [debug mode](./intro.md#debug-mode) | True |
| INVENTREE_DEBUG_QUERYCOUNT | debug_querycount | Enable [query count logging](https://github.com/bradmontgomery/django-querycount) in the terminal | False |
| INVENTREE_DEBUG_SHELL | debug_shell | Enable [administrator shell](https://github.com/djk2/django-admin-shell) (only in debug mode) | False |
 | INVENTREE_LOG_LEVEL | log_level | Set level of logging to terminal | WARNING |
 | INVENTREE_DB_LOGGING | db_logging | Enable logging of database messages | False |
 | INVENTREE_TIMEZONE | timezone | Server timezone | UTC |
@@ -297,6 +299,10 @@ Alternatively this location can be specified with the `INVENTREE_BACKUP_DIR` environment variable.
 InvenTree provides allowance for additional sign-in options. The following options are not enabled by default, and care must be taken by the system administrator when configuring these settings.
| Environment Variable | Configuration File | Description | Default |
| --- | --- | --- | --- |
| INVENTREE_MFA_ENABLED | mfa_enabled | Enable or disable multi-factor authentication support for the InvenTree server | True |
 ### Single Sign On
 Single Sign On (SSO) allows users to sign in to InvenTree using a third-party authentication provider. This functionality is provided by the [django-allauth](https://docs.allauth.org/en/latest/) package.
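The debug helpers and MFA switch added in this file are plain environment variables (or the equivalent keys in the configuration file). As a minimal sketch only, assuming the environment-variable form and purely illustrative values, they could be set before starting the server like this:

```bash
# Illustrative values only: extra debug helpers
# (the admin shell is only honoured when debug mode is active)
export INVENTREE_DEBUG_QUERYCOUNT=True
export INVENTREE_DEBUG_SHELL=True

# Disable multi-factor authentication support (enabled by default)
export INVENTREE_MFA_ENABLED=False
```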


@@ -199,7 +199,7 @@ Any persistent files generated by the Caddy container (such as certificates, etc
 ### Demo Dataset
-To quickly get started with a demo dataset, you can run the following command:
+To quickly get started with a [demo dataset](../demo.md), you can run the following command:
 ```
 docker compose run --rm inventree-server invoke setup-test -i
 ```
@@ -212,3 +212,86 @@ To start afresh (and completely remove the existing database), run the following
 ```
 docker compose run --rm inventree-server invoke delete-data
 ```
## Install custom packages
To install custom packages into your docker image, you can build a custom docker image which is then used automatically each time you update. The following changes need to be applied to the docker compose file:
<details><summary>docker-compose.yml changes</summary>
```diff
diff --git a/docker-compose.yml b/docker-compose.yml
index 8adee63..dc3993c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -69,7 +69,14 @@ services:
# Uses gunicorn as the web server
inventree-server:
# If you wish to specify a particular InvenTree version, do so here
- image: inventree/inventree:${INVENTREE_TAG:-stable}
+ image: inventree/inventree:${INVENTREE_TAG:-stable}-custom
+ pull_policy: never
+ build:
+ context: .
+ dockerfile: Dockerfile
+ target: production
+ args:
+ INVENTREE_TAG: ${INVENTREE_TAG:-stable}
# Only change this port if you understand the stack.
# If you change this you have to change:
# - the proxy settings (on two lines)
@@ -88,7 +95,8 @@ services:
# Background worker process handles long-running or periodic tasks
inventree-worker:
# If you wish to specify a particular InvenTree version, do so here
- image: inventree/inventree:${INVENTREE_TAG:-stable}
+ image: inventree/inventree:${INVENTREE_TAG:-stable}-custom
+ pull_policy: never
command: invoke worker
depends_on:
- inventree-server
```
</details>
And the following `Dockerfile` needs to be created:
<details><summary>Dockerfile</summary>
```dockerfile
ARG INVENTREE_TAG
FROM inventree/inventree:${INVENTREE_TAG} as production
# Install whatever dependency is needed here (e.g. git)
RUN apk add --no-cache git
```
</details>
If additional development packages are needed (e.g. just for building a wheel for a pip package), a multi-stage build can be used with the following `Dockerfile`:
<details><summary>Dockerfile</summary>
```dockerfile
ARG INVENTREE_TAG
# prebuild stage - needs a lot of build dependencies
# make sure, the alpine and python version matches the version used in the inventree base image
FROM python:3.11-alpine3.18 as prebuild
# Install whatever development dependency is needed (e.g. cups-dev, gcc, the musl-dev build tools and the pip pycups package)
RUN apk add --no-cache cups-dev gcc musl-dev && \
pip install --user --no-cache-dir pycups
# production image - only install the cups shared library
FROM inventree/inventree:${INVENTREE_TAG} as production
# Install e.g. shared library later available in the final image
RUN apk add --no-cache cups-libs
# Copy the pip wheels from the build stage in the production stage
COPY --from=prebuild /root/.local /root/.local
```
</details>
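With the compose override and the `Dockerfile` variants shown above, the `-custom` image is built locally instead of being pulled. As a rough usage sketch (assuming the standard docker compose workflow; the exact invocation may differ for your setup):

```bash
# Build the local "-custom" image defined by the build: section above
docker compose build

# Start (or restart) the stack using the freshly built image
docker compose up -d
```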


@@ -239,6 +239,7 @@ Run the following command to initialize the database with the required tables.
 cd /home/inventree/src
 invoke update
 ```
+NOTE: If you are on Debian and get "No module named 'django'", it might be that `/usr/bin/invoke` is being used. Make sure that the python environment (`/home/inventree/env/bin`) comes first in the PATH variable.
 ### Create Admin Account
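As a minimal sketch of the workaround described in the note above (the interpreter path comes from the note itself; adjust it to your installation):

```bash
# Ensure the InvenTree virtual environment is found before /usr/bin/invoke
export PATH=/home/inventree/env/bin:$PATH
invoke update
```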


@@ -6,6 +6,14 @@ title: Stock
 A stock location represents a physical real-world location where *Stock Items* are stored. Locations are arranged in a cascading manner and each location may contain multiple sub-locations, or stock, or both.
### Icons
Stock locations can be assigned custom icons (either directly or through [Stock Location Types](#stock-location-type)). When using PUI there is a custom icon picker component available that can help to select the right icon. However in CUI the icon needs to be entered manually.
By default, the tabler icons package (with prefix: `ti`) is available. To manually select an item, search on the [tabler icons](https://tabler.io/icons) page for an icon and copy its name e.g. `bookmark`. Some icons have a filled and an outline version (if no variants are specified, it's an outline variant). Now these values can be put into the format: `<package-prefix>:<icon-name>:<variant>`. E.g. `ti:bookmark:outline` or `ti:bookmark:filled`.
If there are some icons missing in the tabler icons package, users can even install their own custom icon packs through a plugin. See [`IconPackMixin`](../extend/plugins/icon.md).
 ## Stock Location Type
 A stock location type represents a specific type of location (e.g. one specific size of drawer, shelf, ... or box) which can be assigned to multiple stock locations. In the first place, it is used to specify an icon and having the icon in sync for all locations that use this location type, but it also serves as a data field to quickly see what type of location this is. It is planned to add e.g. drawer dimension information to the location type to add a "find a matching, empty stock location" tool.


@@ -54,11 +54,23 @@ def check_link(url) -> bool:
     return False
+def get_build_enviroment() -> str:
+    """Returns the branch we are currently building on, based on the environment variables of the various CI platforms."""
+    # Check if we are in ReadTheDocs
+    if os.environ.get('READTHEDOCS') == 'True':
+        return os.environ.get('READTHEDOCS_GIT_IDENTIFIER')
+    # We are in GitHub Actions
+    elif os.environ.get('GITHUB_ACTIONS') == 'true':
+        return os.environ.get('GITHUB_REF')
+    else:
+        return 'master'
 def define_env(env):
     """Define custom environment variables for the documentation build process."""
     @env.macro
-    def sourcedir(dirname, branch='master'):
+    def sourcedir(dirname, branch=None):
         """Return a link to a directory within the source code repository.
         Arguments:
@@ -70,6 +82,9 @@ def define_env(env):
         Raises:
             - FileNotFoundError: If the directory does not exist, or the generated URL is invalid
         """
+        if branch == None:
+            branch = get_build_enviroment()
         if dirname.startswith('/'):
             dirname = dirname[1:]
@@ -94,7 +109,7 @@ def define_env(env):
         return url
     @env.macro
-    def sourcefile(filename, branch='master', raw=False):
+    def sourcefile(filename, branch=None, raw=False):
         """Return a link to a file within the source code repository.
         Arguments:
@@ -106,6 +121,9 @@ def define_env(env):
         Raises:
             - FileNotFoundError: If the file does not exist, or the generated URL is invalid
         """
+        if branch == None:
+            branch = get_build_enviroment()
         if filename.startswith('/'):
             filename = filename[1:]


@@ -203,6 +203,7 @@ nav:
       - Barcode Mixin: extend/plugins/barcode.md
       - Currency Mixin: extend/plugins/currency.md
       - Event Mixin: extend/plugins/event.md
+      - Icon Pack Mixin: extend/plugins/icon.md
       - Label Printing Mixin: extend/plugins/label.md
       - Locate Mixin: extend/plugins/locate.md
       - Navigation Mixin: extend/plugins/navigation.md


@@ -134,9 +134,9 @@ essentials-openapi==1.0.9 \
     --hash=sha256:1431e98ef0a442f1919fd9833385bf44d832c355fd05919dc06d43d4da0f8ef4 \
     --hash=sha256:ebc46aac41c0b917a658f77caaa0ca93a6e4a4519de8a272f82c1538ccd5619f
     # via neoteroi-mkdocs
-exceptiongroup==1.2.1 \
-    --hash=sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad \
-    --hash=sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16
+exceptiongroup==1.2.2 \
+    --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \
+    --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc
     # via anyio
 ghp-import==2.1.0 \
     --hash=sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619 \
@@ -173,9 +173,9 @@ idna==3.7 \
     #   anyio
     #   httpx
     #   requests
-importlib-metadata==8.0.0 \
-    --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
-    --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
+importlib-metadata==8.2.0 \
+    --hash=sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369 \
+    --hash=sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d
     # via
     #   markdown
     #   mkdocs
@@ -301,21 +301,21 @@ mkdocs-get-deps==0.2.0 \
     --hash=sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c \
     --hash=sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134
     # via mkdocs
-mkdocs-git-revision-date-localized-plugin==1.2.5 \
-    --hash=sha256:0c439816d9d0dba48e027d9d074b2b9f1d7cd179f74ba46b51e4da7bb3dc4b9b \
-    --hash=sha256:d796a18b07cfcdb154c133e3ec099d2bb5f38389e4fd54d3eb516a8a736815b8
+mkdocs-git-revision-date-localized-plugin==1.2.6 \
+    --hash=sha256:e432942ce4ee8aa9b9f4493e993dee9d2cc08b3ea2b40a3d6b03ca0f2a4bcaa2 \
+    --hash=sha256:f015cb0f3894a39b33447b18e270ae391c4e25275cac5a626e80b243784e2692
     # via -r docs/requirements.in
-mkdocs-include-markdown-plugin==6.0.6 \
-    --hash=sha256:7c80258b2928563c75cc057a7b9a0014701c40804b1b6aa290f3b4032518b43c \
-    --hash=sha256:7ccafbaa412c1e5d3510c4aff46d1fe64c7a810c01dace4c636253d1aa5bc193
+mkdocs-include-markdown-plugin==6.2.1 \
+    --hash=sha256:46fc372886d48eec541d36138d1fe1db42afd08b976ef7c8d8d4ea6ee4d5d1e8 \
+    --hash=sha256:8dfc3aee9435679b094cbdff023239e91d86cf357c40b0e99c28036449661830
     # via -r docs/requirements.in
 mkdocs-macros-plugin==1.0.5 \
     --hash=sha256:f60e26f711f5a830ddf1e7980865bf5c0f1180db56109803cdd280073c1a050a \
     --hash=sha256:fe348d75f01c911f362b6d998c57b3d85b505876dde69db924f2c512c395c328
     # via -r docs/requirements.in
-mkdocs-material==9.5.24 \
-    --hash=sha256:02d5aaba0ee755e707c3ef6e748f9acb7b3011187c0ea766db31af8905078a34 \
-    --hash=sha256:e12cd75954c535b61e716f359cf2a5056bf4514889d17161fdebd5df4b0153c6
+mkdocs-material==9.5.31 \
+    --hash=sha256:1b1f49066fdb3824c1e96d6bacd2d4375de4ac74580b47e79ff44c4d835c5fcb \
+    --hash=sha256:31833ec664772669f5856f4f276bf3fdf0e642a445e64491eda459249c3a1ca8
     # via -r docs/requirements.in
 mkdocs-material-extensions==1.3.1 \
     --hash=sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443 \
@@ -325,9 +325,9 @@ mkdocs-simple-hooks==0.1.5 \
     --hash=sha256:dddbdf151a18723c9302a133e5cf79538be8eb9d274e8e07d2ac3ac34890837c \
     --hash=sha256:efeabdbb98b0850a909adee285f3404535117159d5cb3a34f541d6eaa644d50a
     # via -r docs/requirements.in
-mkdocstrings[python]==0.25.1 \
-    --hash=sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf \
-    --hash=sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51
+mkdocstrings[python]==0.25.2 \
+    --hash=sha256:5cf57ad7f61e8be3111a2458b4e49c2029c9cb35525393b179f9c916ca8042dc \
+    --hash=sha256:9e2cda5e2e12db8bb98d21e3410f3f27f8faab685a24b03b06ba7daa5b92abfc
     # via
     #   -r docs/requirements.in
     #   mkdocstrings-python
@@ -441,90 +441,90 @@ pyyaml-env-tag==0.1 \
     --hash=sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb \
     --hash=sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069
     # via mkdocs
-regex==2024.5.15 \
-    --hash=sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649 \
-    --hash=sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35 \
-    --hash=sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb \
-    --hash=sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68 \
-    --hash=sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5 \
-    --hash=sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133 \
-    --hash=sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0 \
-    --hash=sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d \
-    --hash=sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da \
-    --hash=sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f \
-    --hash=sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d \
-    --hash=sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53 \
-    --hash=sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa \
-    --hash=sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a \
-    --hash=sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890 \
-    --hash=sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67 \
-    --hash=sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c \
-    --hash=sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2 \
-    --hash=sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced \
-    --hash=sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741 \
-    --hash=sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f \
-    --hash=sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa \
-    --hash=sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf \
-    --hash=sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4 \
-    --hash=sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5 \
-    --hash=sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2 \
-    --hash=sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384 \
-    --hash=sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7 \
-    --hash=sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014 \
-    --hash=sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704 \
-    --hash=sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5 \
-    --hash=sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2 \
-    --hash=sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49 \
-    --hash=sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1 \
-    --hash=sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694 \
-    --hash=sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629 \
-    --hash=sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6 \
-    --hash=sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435 \
-    --hash=sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c \
-    --hash=sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835 \
-    --hash=sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e \
-    --hash=sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201 \
-    --hash=sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62 \
-    --hash=sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5 \
-    --hash=sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16 \
-    --hash=sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f \
-    --hash=sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1 \
-    --hash=sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f \
-    --hash=sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f \
-    --hash=sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145 \
-    --hash=sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3 \
-    --hash=sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed \
-    --hash=sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143 \
-    --hash=sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca \
-    --hash=sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9 \
-    --hash=sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa \
-    --hash=sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850 \
-    --hash=sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80 \
-    --hash=sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe \
-    --hash=sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656 \
-    --hash=sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388 \
-    --hash=sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1 \
-    --hash=sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294 \
-    --hash=sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3 \
-    --hash=sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d \
-    --hash=sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b \
-    --hash=sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40 \
-    --hash=sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600 \
-    --hash=sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c \
-    --hash=sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569 \
-    --hash=sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456 \
+regex==2024.7.24 \
+    --hash=sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c \
+    --hash=sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535 \
+    --hash=sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24 \
+    --hash=sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce \
+    --hash=sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc \
+    --hash=sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5 \
+    --hash=sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce \
+    --hash=sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53 \
+    --hash=sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d \
+    --hash=sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c \
+    --hash=sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908 \
+    --hash=sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8 \
+    --hash=sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024 \
+    --hash=sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281 \
+    --hash=sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a \
+    --hash=sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169 \
+    --hash=sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364 \
+    --hash=sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa \
+    --hash=sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be \
+    --hash=sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53 \
+    --hash=sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759 \
+    --hash=sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e \
+    --hash=sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b \
+    --hash=sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52 \
+    --hash=sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610 \
+    --hash=sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05 \
+    --hash=sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2 \
+    --hash=sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca \
+    --hash=sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0 \
+    --hash=sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293 \
+    --hash=sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289 \
+    --hash=sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e \
+    --hash=sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f \
+    --hash=sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c \
+    --hash=sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94 \
+    --hash=sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad \
+    --hash=sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46 \
+    --hash=sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9 \
+    --hash=sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9 \
+    --hash=sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee \
+    --hash=sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9 \
+    --hash=sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1 \
+    --hash=sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9 \
+    --hash=sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799 \
+    --hash=sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1 \
+    --hash=sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b \
+    --hash=sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf \
+    --hash=sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5 \
+    --hash=sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2 \
+    --hash=sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e \
+    --hash=sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51 \
+    --hash=sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506 \
+    --hash=sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73 \
+    --hash=sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7 \
+    --hash=sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5 \
+    --hash=sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57 \
+    --hash=sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4 \
+    --hash=sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd \
+    --hash=sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b \
+    --hash=sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41 \
+    --hash=sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe \
+    --hash=sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59 \
+    --hash=sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8 \
+    --hash=sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f \
+    --hash=sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e \
+    --hash=sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750 \
+    --hash=sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1 \
+    --hash=sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96 \
+    --hash=sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc \
+    --hash=sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440 \
+    --hash=sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe \
--hash=sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9 \ --hash=sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38 \
--hash=sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb \ --hash=sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950 \
--hash=sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e \ --hash=sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2 \
--hash=sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f \ --hash=sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd \
--hash=sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d \ --hash=sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce \
--hash=sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a \ --hash=sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66 \
--hash=sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a \ --hash=sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3 \
--hash=sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796 --hash=sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86
# via mkdocs-material # via mkdocs-material
requests==2.32.2 \ requests==2.32.3 \
--hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
--hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
# via mkdocs-material # via mkdocs-material
rich==13.7.1 \ rich==13.7.1 \
--hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \

View File

@ -16,7 +16,7 @@ exclude = [
src = ["src/backend/InvenTree"] src = ["src/backend/InvenTree"]
# line-length = 120 # line-length = 120
[tool.ruff.extend-per-file-ignores] [tool.ruff.lint.extend-per-file-ignores]
"__init__.py" = ["D104"] "__init__.py" = ["D104"]
[tool.ruff.lint] [tool.ruff.lint]
@ -87,4 +87,4 @@ known_django="django"
sections=["FUTURE","STDLIB","DJANGO","THIRDPARTY","FIRSTPARTY","LOCALFOLDER"] sections=["FUTURE","STDLIB","DJANGO","THIRDPARTY","FIRSTPARTY","LOCALFOLDER"]
[tool.codespell] [tool.codespell]
ignore-words-list = ["assertIn","SME"] ignore-words-list = ["assertIn","SME","intoto"]

View File

@ -1,12 +1,45 @@
"""InvenTree API version information.""" """InvenTree API version information."""
# InvenTree API version # InvenTree API version
INVENTREE_API_VERSION = 226 INVENTREE_API_VERSION = 235
"""Increment this API version number whenever there is a significant change to the API that any clients need to know about.""" """Increment this API version number whenever there is a significant change to the API that any clients need to know about."""
INVENTREE_API_TEXT = """ INVENTREE_API_TEXT = """
v235 - 2024-08-08 : https://github.com/inventree/InvenTree/pull/7837
- Adds "on_order" quantity to SalesOrderLineItem serializer
- Adds "building" quantity to SalesOrderLineItem serializer
v234 - 2024-08-08 : https://github.com/inventree/InvenTree/pull/7829
- Fixes bug in the plugin metadata endpoint
v233 - 2024-08-04 : https://github.com/inventree/InvenTree/pull/7807
- Adds new endpoints for managing state of build orders
- Adds new endpoints for managing state of purchase orders
- Adds new endpoints for managing state of sales orders
- Adds new endpoints for managing state of return orders
v232 - 2024-08-03 : https://github.com/inventree/InvenTree/pull/7793
- Allow ordering of SalesOrderShipment API by 'shipment_date' and 'delivery_date'
v231 - 2024-08-03 : https://github.com/inventree/InvenTree/pull/7794
- Optimize BuildItem and BuildLine serializers to improve API efficiency
v230 - 2024-05-05 : https://github.com/inventree/InvenTree/pull/7164
- Adds test statistics endpoint
v229 - 2024-07-31 : https://github.com/inventree/InvenTree/pull/7775
- Add extra exportable fields to the BomItem serializer
v228 - 2024-07-18 : https://github.com/inventree/InvenTree/pull/7684
- Adds "icon" field to the PartCategory.path and StockLocation.path API
- Adds icon packages API endpoint
v227 - 2024-07-19 : https://github.com/inventree/InvenTree/pull/7693/
- Adds endpoints to list and revoke the tokens issued to the current user
v226 - 2024-07-15 : https://github.com/inventree/InvenTree/pull/7648 v226 - 2024-07-15 : https://github.com/inventree/InvenTree/pull/7648
- Adds barcode generation API endpoint - Adds barcode generation API endpoint
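The entries above are additive, so an API client can gate new behaviour on the reported schema version. A minimal hedged sketch, assuming the server publishes 'apiVersion' via the /api/ info endpoint and that sales order lines are listed at /api/order/so-line/ (neither path is confirmed by this diff):

import requests

API = 'http://localhost:8000/api'
HEADERS = {'Authorization': 'Token <api-token>'}

info = requests.get(f'{API}/', headers=HEADERS, timeout=10).json()
api_version = info.get('apiVersion', 0)

# The "on_order" and "building" quantities were added to the
# SalesOrderLineItem serializer in v235 - only read them when available
if api_version >= 235:
    lines = requests.get(f'{API}/order/so-line/', headers=HEADERS, timeout=10).json()
    for line in lines:
        print(line['part'], line['on_order'], line['building'])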

View File

@ -15,6 +15,7 @@ from allauth.account.forms import LoginForm, SignupForm, set_form_field_order
from allauth.core.exceptions import ImmediateHttpResponse from allauth.core.exceptions import ImmediateHttpResponse
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from allauth_2fa.adapter import OTPAdapter from allauth_2fa.adapter import OTPAdapter
from allauth_2fa.forms import TOTPDeviceForm
from allauth_2fa.utils import user_has_valid_totp_device from allauth_2fa.utils import user_has_valid_totp_device
from crispy_forms.bootstrap import AppendedText, PrependedAppendedText, PrependedText from crispy_forms.bootstrap import AppendedText, PrependedAppendedText, PrependedText
from crispy_forms.helper import FormHelper from crispy_forms.helper import FormHelper
@ -211,6 +212,16 @@ class CustomSignupForm(SignupForm):
return cleaned_data return cleaned_data
class CustomTOTPDeviceForm(TOTPDeviceForm):
"""Ensure that db registration is enabled."""
def __init__(self, user, metadata=None, **kwargs):
"""Override to check if registration is open."""
if not settings.MFA_ENABLED:
raise forms.ValidationError(_('MFA Registration is disabled.'))
super().__init__(user, metadata, **kwargs)
def registration_enabled(): def registration_enabled():
"""Determine whether user registration is enabled.""" """Determine whether user registration is enabled."""
if get_global_setting('LOGIN_ENABLE_REG') or InvenTree.sso.registration_enabled(): if get_global_setting('LOGIN_ENABLE_REG') or InvenTree.sso.registration_enabled():

View File

@ -0,0 +1,192 @@
"""Custom management command to migrate the old FontAwesome icons."""
import json
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import models
from common.icons import validate_icon
from part.models import PartCategory
from stock.models import StockLocation, StockLocationType
class Command(BaseCommand):
"""Generate an icon map from the FontAwesome library to the new icon library."""
help = """Helper command to migrate the old FontAwesome icons to the new icon library."""
def add_arguments(self, parser):
"""Add the arguments."""
parser.add_argument(
'--output-file',
type=str,
help='Path to file to write generated icon map to',
)
parser.add_argument(
'--input-file', type=str, help='Path to file to read icon map from'
)
parser.add_argument(
'--include-items',
default=False,
action='store_true',
help='Include referenced inventree items in the output icon map (optional)',
)
parser.add_argument(
'--import-now',
default=False,
action='store_true',
help='CAUTION: If this flag is set, the icon map will be imported and the database will be touched',
)
def handle(self, *args, **kwargs):
"""Generate an icon map from the FontAwesome library to the new icon library."""
# Check for invalid combinations of arguments
if kwargs['output_file'] and kwargs['input_file']:
raise CommandError('Cannot specify both --input-file and --output-file')
if not kwargs['output_file'] and not kwargs['input_file']:
raise CommandError('Must specify either --input-file or --output-file')
if kwargs['include_items'] and not kwargs['output_file']:
raise CommandError(
'--include-items can only be used with an --output-file specified'
)
if kwargs['output_file'] and kwargs['import_now']:
raise CommandError(
'--import-now can only be used with an --input-file specified'
)
ICON_MODELS = [
(StockLocation, 'custom_icon'),
(StockLocationType, 'icon'),
(PartCategory, '_icon'),
]
def get_model_items_with_icons(model: models.Model, icon_field: str):
"""Return a list of models with icon fields."""
return model.objects.exclude(**{f'{icon_field}__isnull': True}).exclude(**{
f'{icon_field}__exact': ''
})
# Generate output icon map file
if kwargs['output_file']:
icons = {}
for model, icon_name in ICON_MODELS:
self.stdout.write(
f'Processing model {model.__name__} with icon field {icon_name}'
)
items = get_model_items_with_icons(model, icon_name)
for item in items:
icon = getattr(item, icon_name)
try:
validate_icon(icon)
continue # Skip if the icon is already valid
except ValidationError:
pass
if icon not in icons:
icons[icon] = {
**({'items': []} if kwargs['include_items'] else {}),
'new_icon': '',
}
if kwargs['include_items']:
icons[icon]['items'].append({
'model': model.__name__.lower(),
'id': item.id, # type: ignore
})
self.stdout.write(f'Writing icon map for {len(icons.keys())} icons')
with open(kwargs['output_file'], 'w') as f:
json.dump(icons, f, indent=2)
self.stdout.write(f'Icon map written to {kwargs["output_file"]}')
# Import icon map file
if kwargs['input_file']:
with open(kwargs['input_file'], 'r') as f:
icons = json.load(f)
self.stdout.write(f'Loaded icon map for {len(icons.keys())} icons')
self.stdout.write('Verifying icon map')
has_errors = False
# Verify that all new icons are valid icons
for old_icon, data in icons.items():
try:
validate_icon(data.get('new_icon', ''))
except ValidationError:
self.stdout.write(
f'[ERR] Invalid icon: "{old_icon}" -> "{data.get("new_icon", "")}"'
)
has_errors = True
# Verify that all required items are provided in the icon map
for model, icon_name in ICON_MODELS:
self.stdout.write(
f'Processing model {model.__name__} with icon field {icon_name}'
)
items = get_model_items_with_icons(model, icon_name)
for item in items:
icon = getattr(item, icon_name)
try:
validate_icon(icon)
continue # Skip if the icon is already valid
except ValidationError:
pass
if icon not in icons:
self.stdout.write(
f' [ERR] Icon "{icon}" not found in icon map'
)
has_errors = True
# If there are errors, stop here
if has_errors:
self.stdout.write(
'[ERR] Icon map has errors, please fix them before continuing with importing'
)
return
# Import the icon map into the database if the flag is set
if kwargs['import_now']:
self.stdout.write('Start importing icons and updating database...')
cnt = 0
for model, icon_name in ICON_MODELS:
self.stdout.write(
f'Processing model {model.__name__} with icon field {icon_name}'
)
items = get_model_items_with_icons(model, icon_name)
for item in items:
icon = getattr(item, icon_name)
try:
validate_icon(icon)
continue # Skip if the icon is already valid
except ValidationError:
pass
setattr(item, icon_name, icons[icon]['new_icon'])
cnt += 1
item.save()
self.stdout.write(
f'Icon map successfully imported - changed {cnt} items'
)
self.stdout.write('Icons are now migrated')
else:
self.stdout.write('Icon map is valid and ready to be imported')
self.stdout.write(
'Run the command with --import-now to import the icon map and update the database'
)

View File

@ -575,6 +575,9 @@ class InvenTreeTree(MetadataMixin, PluginValidationMixin, MPTTModel):
# e.g. for StockLocation, this value is 'location' # e.g. for StockLocation, this value is 'location'
ITEM_PARENT_KEY = None ITEM_PARENT_KEY = None
# Extra fields to include in the get_path result. E.g. icon
EXTRA_PATH_FIELDS = []
class Meta: class Meta:
"""Metaclass defines extra model properties.""" """Metaclass defines extra model properties."""
@ -868,7 +871,14 @@ class InvenTreeTree(MetadataMixin, PluginValidationMixin, MPTTModel):
name: <name>, name: <name>,
} }
""" """
return [{'pk': item.pk, 'name': item.name} for item in self.path] return [
{
'pk': item.pk,
'name': item.name,
**{k: getattr(item, k, None) for k in self.EXTRA_PATH_FIELDS},
}
for item in self.path
]
def __str__(self): def __str__(self):
"""String representation of a category is the full path to that category.""" """String representation of a category is the full path to that category."""

View File

@ -115,6 +115,7 @@ def canAppAccessDatabase(
'makemessages', 'makemessages',
'compilemessages', 'compilemessages',
'spectactular', 'spectactular',
'collectstatic',
] ]
if not allow_shell: if not allow_shell:
@ -125,7 +126,7 @@ def canAppAccessDatabase(
excluded_commands.append('test') excluded_commands.append('test')
if not allow_plugins: if not allow_plugins:
excluded_commands.extend(['collectstatic', 'collectplugins']) excluded_commands.extend(['collectplugins'])
for cmd in excluded_commands: for cmd in excluded_commands:
if cmd in sys.argv: if cmd in sys.argv:

View File

@ -1210,6 +1210,9 @@ ACCOUNT_FORMS = {
'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm', 'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm',
'disconnect': 'allauth.socialaccount.forms.DisconnectForm', 'disconnect': 'allauth.socialaccount.forms.DisconnectForm',
} }
ALLAUTH_2FA_FORMS = {'setup': 'InvenTree.forms.CustomTOTPDeviceForm'}
# Determine if multi-factor authentication is enabled for this server (default = True)
MFA_ENABLED = get_boolean_setting('INVENTREE_MFA_ENABLED', 'mfa_enabled', True)
SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter' SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter'
ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter' ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter'

View File

@ -3,6 +3,7 @@
import logging import logging
from importlib import import_module from importlib import import_module
from django.conf import settings
from django.urls import NoReverseMatch, include, path, reverse from django.urls import NoReverseMatch, include, path, reverse
from allauth.account.models import EmailAddress from allauth.account.models import EmailAddress
@ -177,7 +178,9 @@ class SocialProviderListView(ListAPI):
data = { data = {
'sso_enabled': InvenTree.sso.login_enabled(), 'sso_enabled': InvenTree.sso.login_enabled(),
'sso_registration': InvenTree.sso.registration_enabled(), 'sso_registration': InvenTree.sso.registration_enabled(),
'mfa_required': get_global_setting('LOGIN_ENFORCE_MFA'), 'mfa_required': settings.MFA_ENABLED
and get_global_setting('LOGIN_ENFORCE_MFA'),
'mfa_enabled': settings.MFA_ENABLED,
'providers': provider_list, 'providers': provider_list,
'registration_enabled': get_global_setting('LOGIN_ENABLE_REG'), 'registration_enabled': get_global_setting('LOGIN_ENABLE_REG'),
'password_forgotten_enabled': get_global_setting('LOGIN_ENABLE_PWD_FORGOT'), 'password_forgotten_enabled': get_global_setting('LOGIN_ENABLE_PWD_FORGOT'),
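A hedged sketch of how a frontend consumes the two new flags; the mount point of this provider list endpoint is not shown in the diff, so /api/auth/providers/ below is an assumption:

import requests

resp = requests.get('http://localhost:8000/api/auth/providers/', timeout=10)
data = resp.json()

# 'mfa_enabled' tells the client whether to offer TOTP registration at all.
mfa_available = bool(data.get('mfa_enabled'))

# 'mfa_required' is now only True when MFA is enabled on the server
# AND enforced via the LOGIN_ENFORCE_MFA setting.
mfa_forced = bool(data.get('mfa_required'))

print('Offer TOTP setup:', mfa_available)
print('Force MFA enrolment at login:', mfa_forced)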

View File

@ -1101,3 +1101,19 @@ a {
.large-treeview-icon { .large-treeview-icon {
font-size: 1em; font-size: 1em;
} }
.api-icon {
font-style: normal;
font-weight: normal;
font-variant: normal;
text-transform: none;
line-height: 1;
/* Better font rendering */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.test-statistics-table-total-row {
font-weight: bold;
border-top-style: double;
}

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020-2024 Paweł Kuna
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because one or more lines are too long

View File

@ -118,7 +118,7 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
if last_success: if last_success:
threshold = datetime.now() - timedelta(days=n_days) threshold = datetime.now() - timedelta(days=n_days)
if last_success > threshold: if last_success.date() > threshold.date():
logger.info( logger.info(
"Last successful run for '%s' was too recent - skipping task", task_name "Last successful run for '%s' was too recent - skipping task", task_name
) )
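A worked example of why the comparison above switched to whole dates: with n_days=1, a task that last succeeded late yesterday should still be allowed to run early today.

from datetime import datetime, timedelta

last_success = datetime(2024, 8, 8, 23, 0)   # ran late yesterday evening
now = datetime(2024, 8, 9, 1, 0)             # it is now early the next morning
threshold = now - timedelta(days=1)          # 2024-08-08 01:00

print(last_success > threshold)                 # True  -> old check: "too recent", skip
print(last_success.date() > threshold.date())   # False -> new check: allowed to run today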

View File

@ -35,6 +35,7 @@ from company.urls import company_urls, manufacturer_part_urls, supplier_part_url
from order.urls import order_urls from order.urls import order_urls
from part.urls import part_urls from part.urls import part_urls
from plugin.urls import get_plugin_urls from plugin.urls import get_plugin_urls
from stock.api import test_statistics_api_urls
from stock.urls import stock_urls from stock.urls import stock_urls
from web.urls import api_urls as web_api_urls from web.urls import api_urls as web_api_urls
from web.urls import urlpatterns as platform_urls from web.urls import urlpatterns as platform_urls
@ -109,6 +110,7 @@ apipatterns = [
), ),
]), ]),
), ),
path('test-statistics/', include(test_statistics_api_urls)),
path('user/', include(users.api.user_urls)), path('user/', include(users.api.user_urls)),
path('web/', include(web_api_urls)), path('web/', include(web_api_urls)),
# Plugin endpoints # Plugin endpoints

View File

@ -18,7 +18,7 @@ from django.conf import settings
from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION
# InvenTree software version # InvenTree software version
INVENTREE_SW_VERSION = '0.16.0 dev' INVENTREE_SW_VERSION = '0.17.0 dev'
logger = logging.getLogger('inventree') logger = logging.getLogger('inventree')

View File

@ -359,6 +359,8 @@ class BuildLineList(BuildLineEndpoint, DataExportViewMixin, ListCreateAPI):
'unit_quantity', 'unit_quantity',
'available_stock', 'available_stock',
'trackable', 'trackable',
'allow_variants',
'inherited',
] ]
ordering_field_aliases = { ordering_field_aliases = {
@ -368,6 +370,8 @@ class BuildLineList(BuildLineEndpoint, DataExportViewMixin, ListCreateAPI):
'consumable': 'bom_item__consumable', 'consumable': 'bom_item__consumable',
'optional': 'bom_item__optional', 'optional': 'bom_item__optional',
'trackable': 'bom_item__sub_part__trackable', 'trackable': 'bom_item__sub_part__trackable',
'allow_variants': 'bom_item__allow_variants',
'inherited': 'bom_item__inherited',
} }
search_fields = [ search_fields = [
@ -466,9 +470,19 @@ class BuildFinish(BuildOrderContextMixin, CreateAPI):
"""API endpoint for marking a build as finished (completed).""" """API endpoint for marking a build as finished (completed)."""
queryset = Build.objects.none() queryset = Build.objects.none()
serializer_class = build.serializers.BuildCompleteSerializer serializer_class = build.serializers.BuildCompleteSerializer
def get_queryset(self):
"""Return the queryset for the BuildFinish API endpoint."""
queryset = super().get_queryset()
queryset = queryset.prefetch_related(
'build_lines',
'build_lines__allocations'
)
return queryset
class BuildAutoAllocate(BuildOrderContextMixin, CreateAPI): class BuildAutoAllocate(BuildOrderContextMixin, CreateAPI):
"""API endpoint for 'automatically' allocating stock against a build order. """API endpoint for 'automatically' allocating stock against a build order.
@ -480,7 +494,6 @@ class BuildAutoAllocate(BuildOrderContextMixin, CreateAPI):
""" """
queryset = Build.objects.none() queryset = Build.objects.none()
serializer_class = build.serializers.BuildAutoAllocationSerializer serializer_class = build.serializers.BuildAutoAllocationSerializer
@ -496,10 +509,22 @@ class BuildAllocate(BuildOrderContextMixin, CreateAPI):
""" """
queryset = Build.objects.none() queryset = Build.objects.none()
serializer_class = build.serializers.BuildAllocationSerializer serializer_class = build.serializers.BuildAllocationSerializer
class BuildIssue(BuildOrderContextMixin, CreateAPI):
"""API endpoint for issuing a BuildOrder."""
queryset = Build.objects.all()
serializer_class = build.serializers.BuildIssueSerializer
class BuildHold(BuildOrderContextMixin, CreateAPI):
"""API endpoint for placing a BuildOrder on hold."""
queryset = Build.objects.all()
serializer_class = build.serializers.BuildHoldSerializer
class BuildCancel(BuildOrderContextMixin, CreateAPI): class BuildCancel(BuildOrderContextMixin, CreateAPI):
"""API endpoint for cancelling a BuildOrder.""" """API endpoint for cancelling a BuildOrder."""
@ -571,19 +596,23 @@ class BuildItemList(DataExportViewMixin, BulkDeleteMixin, ListCreateAPI):
return self.serializer_class(*args, **kwargs) return self.serializer_class(*args, **kwargs)
def get_queryset(self): def get_queryset(self):
"""Override the queryset method, to allow filtering by stock_item.part.""" """Override the queryset method, to perform custom prefetch."""
queryset = super().get_queryset() queryset = super().get_queryset()
queryset = queryset.select_related( queryset = queryset.select_related(
'build_line', 'build_line',
'build_line__build', 'build_line__build',
'build_line__bom_item', 'build_line__bom_item',
'build_line__bom_item__part',
'build_line__bom_item__sub_part',
'install_into', 'install_into',
'stock_item', 'stock_item',
'stock_item__location', 'stock_item__location',
'stock_item__part', 'stock_item__part',
'stock_item__supplier_part', 'stock_item__supplier_part__part',
'stock_item__supplier_part__supplier',
'stock_item__supplier_part__manufacturer_part', 'stock_item__supplier_part__manufacturer_part',
'stock_item__supplier_part__manufacturer_part__manufacturer',
).prefetch_related( ).prefetch_related(
'stock_item__location__tags', 'stock_item__location__tags',
) )
@ -655,6 +684,8 @@ build_api_urls = [
path('create-output/', BuildOutputCreate.as_view(), name='api-build-output-create'), path('create-output/', BuildOutputCreate.as_view(), name='api-build-output-create'),
path('delete-outputs/', BuildOutputDelete.as_view(), name='api-build-output-delete'), path('delete-outputs/', BuildOutputDelete.as_view(), name='api-build-output-delete'),
path('scrap-outputs/', BuildOutputScrap.as_view(), name='api-build-output-scrap'), path('scrap-outputs/', BuildOutputScrap.as_view(), name='api-build-output-scrap'),
path('issue/', BuildIssue.as_view(), name='api-build-issue'),
path('hold/', BuildHold.as_view(), name='api-build-hold'),
path('finish/', BuildFinish.as_view(), name='api-build-finish'), path('finish/', BuildFinish.as_view(), name='api-build-finish'),
path('cancel/', BuildCancel.as_view(), name='api-build-cancel'), path('cancel/', BuildCancel.as_view(), name='api-build-cancel'),
path('unallocate/', BuildUnallocate.as_view(), name='api-build-unallocate'), path('unallocate/', BuildUnallocate.as_view(), name='api-build-unallocate'),
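A hedged client-side sketch of the new routes registered above. The full paths are assumptions based on the route names (detail routes mounted as /api/build/<pk>/issue/ and /api/build/<pk>/hold/, build lines listed at /api/build/line/), and a valid API token is assumed:

import requests

API = 'http://localhost:8000/api'
HEADERS = {'Authorization': 'Token <api-token>'}

# Issue build order 7 (PENDING or ON_HOLD -> PRODUCTION)
requests.post(f'{API}/build/7/issue/', headers=HEADERS, timeout=10)

# Put it back on hold (PENDING or PRODUCTION -> ON_HOLD)
requests.post(f'{API}/build/7/hold/', headers=HEADERS, timeout=10)

# Build lines can now also be ordered by the new BOM item flags
requests.get(
    f'{API}/build/line/',
    params={'build': 7, 'ordering': '-allow_variants'},
    headers=HEADERS,
    timeout=10,
)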

View File

@ -5,6 +5,8 @@ from django.db import migrations, models
import django.db.models.deletion import django.db.models.deletion
import mptt.fields import mptt.fields
from build.status_codes import BuildStatus
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -40,7 +42,7 @@ class Migration(migrations.Migration):
migrations.AlterField( migrations.AlterField(
model_name='build', model_name='build',
name='status', name='status',
field=models.PositiveIntegerField(choices=[(10, 'Pending'), (20, 'Production'), (30, 'Cancelled'), (40, 'Complete')], default=10, help_text='Build status code', validators=[django.core.validators.MinValueValidator(0)], verbose_name='Build Status'), field=models.PositiveIntegerField(choices=BuildStatus.items(), default=BuildStatus.PENDING.value, help_text='Build status code', validators=[django.core.validators.MinValueValidator(0)], verbose_name='Build Status'),
), ),
migrations.AlterField( migrations.AlterField(
model_name='build', model_name='build',

View File

@ -2,7 +2,6 @@
import decimal import decimal
import logging import logging
import os
from datetime import datetime from datetime import datetime
from django.conf import settings from django.conf import settings
@ -26,6 +25,7 @@ from build.status_codes import BuildStatus, BuildStatusGroups
from stock.status_codes import StockStatus, StockHistoryCode from stock.status_codes import StockStatus, StockHistoryCode
from build.validators import generate_next_build_reference, validate_build_order_reference from build.validators import generate_next_build_reference, validate_build_order_reference
from generic.states import StateTransitionMixin
import InvenTree.fields import InvenTree.fields
import InvenTree.helpers import InvenTree.helpers
@ -56,6 +56,7 @@ class Build(
InvenTree.models.MetadataMixin, InvenTree.models.MetadataMixin,
InvenTree.models.PluginValidationMixin, InvenTree.models.PluginValidationMixin,
InvenTree.models.ReferenceIndexingMixin, InvenTree.models.ReferenceIndexingMixin,
StateTransitionMixin,
MPTTModel): MPTTModel):
"""A Build object organises the creation of new StockItem objects from other existing StockItem objects. """A Build object organises the creation of new StockItem objects from other existing StockItem objects.
@ -574,6 +575,10 @@ class Build(
- Completed count must meet the required quantity - Completed count must meet the required quantity
- Untracked parts must be allocated - Untracked parts must be allocated
""" """
if self.status != BuildStatus.PRODUCTION.value:
return False
if self.incomplete_count > 0: if self.incomplete_count > 0:
return False return False
@ -602,8 +607,18 @@ class Build(
def complete_build(self, user, trim_allocated_stock=False): def complete_build(self, user, trim_allocated_stock=False):
"""Mark this build as complete.""" """Mark this build as complete."""
return self.handle_transition(
self.status, BuildStatus.COMPLETE.value, self, self._action_complete, user=user, trim_allocated_stock=trim_allocated_stock
)
def _action_complete(self, *args, **kwargs):
"""Action to be taken when a build is completed."""
import build.tasks import build.tasks
trim_allocated_stock = kwargs.pop('trim_allocated_stock', False)
user = kwargs.pop('user', None)
if self.incomplete_count > 0: if self.incomplete_count > 0:
return return
@ -665,6 +680,59 @@ class Build(
target_exclude=[user], target_exclude=[user],
) )
@transaction.atomic
def issue_build(self):
"""Mark the Build as IN PRODUCTION.
Args:
user: The user who is issuing the build
"""
return self.handle_transition(
self.status, BuildStatus.PENDING.value, self, self._action_issue
)
@property
def can_issue(self):
"""Returns True if this BuildOrder can be issued."""
return self.status in [
BuildStatus.PENDING.value,
BuildStatus.ON_HOLD.value,
]
def _action_issue(self, *args, **kwargs):
"""Perform the action to mark this order as PRODUCTION."""
if self.can_issue:
self.status = BuildStatus.PRODUCTION.value
self.save()
trigger_event('build.issued', id=self.pk)
@transaction.atomic
def hold_build(self):
"""Mark the Build as ON HOLD."""
return self.handle_transition(
self.status, BuildStatus.ON_HOLD.value, self, self._action_hold
)
@property
def can_hold(self):
"""Returns True if this BuildOrder can be placed on hold"""
return self.status in [
BuildStatus.PENDING.value,
BuildStatus.PRODUCTION.value,
]
def _action_hold(self, *args, **kwargs):
"""Action to be taken when a build is placed on hold."""
if self.can_hold:
self.status = BuildStatus.ON_HOLD.value
self.save()
trigger_event('build.hold', id=self.pk)
@transaction.atomic @transaction.atomic
def cancel_build(self, user, **kwargs): def cancel_build(self, user, **kwargs):
"""Mark the Build as CANCELLED. """Mark the Build as CANCELLED.
@ -674,8 +742,17 @@ class Build(
- Save the Build object - Save the Build object
""" """
return self.handle_transition(
self.status, BuildStatus.CANCELLED.value, self, self._action_cancel, user=user, **kwargs
)
def _action_cancel(self, *args, **kwargs):
"""Action to be taken when a build is cancelled."""
import build.tasks import build.tasks
user = kwargs.pop('user', None)
remove_allocated_stock = kwargs.get('remove_allocated_stock', False) remove_allocated_stock = kwargs.get('remove_allocated_stock', False)
remove_incomplete_outputs = kwargs.get('remove_incomplete_outputs', False) remove_incomplete_outputs = kwargs.get('remove_incomplete_outputs', False)
@ -1276,7 +1353,7 @@ class Build(
@property @property
def is_complete(self): def is_complete(self):
"""Returns True if the build status is COMPLETE.""" """Returns True if the build status is COMPLETE."""
return self.status == BuildStatus.COMPLETE return self.status == BuildStatus.COMPLETE.value
@transaction.atomic @transaction.atomic
def create_build_line_items(self, prevent_duplicates=True): def create_build_line_items(self, prevent_duplicates=True):
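A minimal usage sketch of the transitions added above, run e.g. from a Django shell; it assumes at least one Build exists in the PENDING state:

from build.models import Build
from build.status_codes import BuildStatus

build = Build.objects.filter(status=BuildStatus.PENDING.value).first()

if build and build.can_issue:
    build.issue_build()    # PENDING / ON_HOLD -> PRODUCTION (fires the 'build.issued' event)

if build and build.can_hold:
    build.hold_build()     # PENDING / PRODUCTION -> ON_HOLD (fires the 'build.hold' event)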

View File

@ -23,7 +23,7 @@ from stock.status_codes import StockStatus
from stock.generators import generate_batch_code from stock.generators import generate_batch_code
from stock.models import StockItem, StockLocation from stock.models import StockItem, StockLocation
from stock.serializers import StockItemSerializerBrief, LocationSerializer from stock.serializers import StockItemSerializerBrief, LocationBriefSerializer
import common.models import common.models
from common.serializers import ProjectCodeSerializer from common.serializers import ProjectCodeSerializer
@ -34,6 +34,7 @@ import part.serializers as part_serializers
from users.serializers import OwnerSerializer from users.serializers import OwnerSerializer
from .models import Build, BuildLine, BuildItem from .models import Build, BuildLine, BuildItem
from .status_codes import BuildStatus
class BuildSerializer(NotesFieldMixin, DataImportExportSerializerMixin, InvenTreeModelSerializer): class BuildSerializer(NotesFieldMixin, DataImportExportSerializerMixin, InvenTreeModelSerializer):
@ -597,6 +598,33 @@ class BuildOutputCompleteSerializer(serializers.Serializer):
) )
class BuildIssueSerializer(serializers.Serializer):
"""DRF serializer for issuing a build order."""
class Meta:
"""Serializer metaclass"""
fields = []
def save(self):
"""Issue the specified build order"""
build = self.context['build']
build.issue_build()
class BuildHoldSerializer(serializers.Serializer):
"""DRF serializer for placing a BuildOrder on hold."""
class Meta:
"""Serializer metaclass."""
fields = []
def save(self):
"""Place the specified build on hold."""
build = self.context['build']
build.hold_build()
class BuildCancelSerializer(serializers.Serializer): class BuildCancelSerializer(serializers.Serializer):
"""DRF serializer class for cancelling an active BuildOrder""" """DRF serializer class for cancelling an active BuildOrder"""
@ -737,6 +765,9 @@ class BuildCompleteSerializer(serializers.Serializer):
"""Perform validation of this serializer prior to saving""" """Perform validation of this serializer prior to saving"""
build = self.context['build'] build = self.context['build']
if build.status != BuildStatus.PRODUCTION.value:
raise ValidationError(_("Build order must be in production state"))
if build.incomplete_count > 0: if build.incomplete_count > 0:
raise ValidationError(_("Build order has incomplete outputs")) raise ValidationError(_("Build order has incomplete outputs"))
@ -1064,6 +1095,8 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
# These fields are only used for data export # These fields are only used for data export
export_only_fields = [ export_only_fields = [
'bom_part_id',
'bom_part_name',
'build_reference', 'build_reference',
'sku', 'sku',
'mpn', 'mpn',
@ -1071,9 +1104,11 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
'part_id', 'part_id',
'part_name', 'part_name',
'part_ipn', 'part_ipn',
'part_description',
'available_quantity', 'available_quantity',
'item_batch_code', 'item_batch_code',
'item_serial', 'item_serial',
'item_packaging',
] ]
class Meta: class Meta:
@ -1097,6 +1132,8 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
# The following fields are only used for data export # The following fields are only used for data export
'bom_reference', 'bom_reference',
'bom_part_id',
'bom_part_name',
'build_reference', 'build_reference',
'location_name', 'location_name',
'mpn', 'mpn',
@ -1104,9 +1141,11 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
'part_id', 'part_id',
'part_name', 'part_name',
'part_ipn', 'part_ipn',
'part_description',
'available_quantity', 'available_quantity',
'item_batch_code', 'item_batch_code',
'item_serial_number', 'item_serial_number',
'item_packaging',
] ]
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@ -1136,11 +1175,17 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
location_name = serializers.CharField(source='stock_item.location.name', label=_('Location Name'), read_only=True) location_name = serializers.CharField(source='stock_item.location.name', label=_('Location Name'), read_only=True)
build_reference = serializers.CharField(source='build.reference', label=_('Build Reference'), read_only=True) build_reference = serializers.CharField(source='build.reference', label=_('Build Reference'), read_only=True)
bom_reference = serializers.CharField(source='build_line.bom_item.reference', label=_('BOM Reference'), read_only=True) bom_reference = serializers.CharField(source='build_line.bom_item.reference', label=_('BOM Reference'), read_only=True)
item_packaging = serializers.CharField(source='stock_item.packaging', label=_('Packaging'), read_only=True)
# Part detail fields # Part detail fields
part_id = serializers.PrimaryKeyRelatedField(source='stock_item.part', label=_('Part ID'), many=False, read_only=True) part_id = serializers.PrimaryKeyRelatedField(source='stock_item.part', label=_('Part ID'), many=False, read_only=True)
part_name = serializers.CharField(source='stock_item.part.name', label=_('Part Name'), read_only=True) part_name = serializers.CharField(source='stock_item.part.name', label=_('Part Name'), read_only=True)
part_ipn = serializers.CharField(source='stock_item.part.IPN', label=_('Part IPN'), read_only=True) part_ipn = serializers.CharField(source='stock_item.part.IPN', label=_('Part IPN'), read_only=True)
part_description = serializers.CharField(source='stock_item.part.description', label=_('Part Description'), read_only=True)
# BOM Item Part ID (it may be different to the allocated part)
bom_part_id = serializers.PrimaryKeyRelatedField(source='build_line.bom_item.sub_part', label=_('BOM Part ID'), many=False, read_only=True)
bom_part_name = serializers.CharField(source='build_line.bom_item.sub_part.name', label=_('BOM Part Name'), read_only=True)
item_batch_code = serializers.CharField(source='stock_item.batch', label=_('Batch Code'), read_only=True) item_batch_code = serializers.CharField(source='stock_item.batch', label=_('Batch Code'), read_only=True)
item_serial_number = serializers.CharField(source='stock_item.serial', label=_('Serial Number'), read_only=True) item_serial_number = serializers.CharField(source='stock_item.serial', label=_('Serial Number'), read_only=True)
@ -1152,9 +1197,9 @@ class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
part_detail = part_serializers.PartBriefSerializer(source='stock_item.part', many=False, read_only=True, pricing=False) part_detail = part_serializers.PartBriefSerializer(source='stock_item.part', many=False, read_only=True, pricing=False)
stock_item_detail = StockItemSerializerBrief(source='stock_item', read_only=True) stock_item_detail = StockItemSerializerBrief(source='stock_item', read_only=True)
location = serializers.PrimaryKeyRelatedField(source='stock_item.location', many=False, read_only=True) location = serializers.PrimaryKeyRelatedField(source='stock_item.location', many=False, read_only=True)
location_detail = LocationSerializer(source='stock_item.location', read_only=True) location_detail = LocationBriefSerializer(source='stock_item.location', read_only=True)
build_detail = BuildSerializer(source='build_line.build', many=False, read_only=True) build_detail = BuildSerializer(source='build_line.build', many=False, read_only=True)
supplier_part_detail = company.serializers.SupplierPartSerializer(source='stock_item.supplier_part', many=False, read_only=True) supplier_part_detail = company.serializers.SupplierPartSerializer(source='stock_item.supplier_part', many=False, read_only=True, brief=True)
quantity = InvenTreeDecimalField(label=_('Allocated Quantity')) quantity = InvenTreeDecimalField(label=_('Allocated Quantity'))
available_quantity = InvenTreeDecimalField(source='stock_item.quantity', read_only=True, label=_('Available Quantity')) available_quantity = InvenTreeDecimalField(source='stock_item.quantity', read_only=True, label=_('Available Quantity'))
@ -1243,7 +1288,7 @@ class BuildLineSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
# Foreign key fields # Foreign key fields
bom_item_detail = part_serializers.BomItemSerializer(source='bom_item', many=False, read_only=True, pricing=False) bom_item_detail = part_serializers.BomItemSerializer(source='bom_item', many=False, read_only=True, pricing=False)
part_detail = part_serializers.PartSerializer(source='bom_item.sub_part', many=False, read_only=True, pricing=False) part_detail = part_serializers.PartBriefSerializer(source='bom_item.sub_part', many=False, read_only=True, pricing=False)
allocations = BuildItemSerializer(many=True, read_only=True) allocations = BuildItemSerializer(many=True, read_only=True)
# Annotated (calculated) fields # Annotated (calculated) fields
@ -1289,16 +1334,20 @@ class BuildLineSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
""" """
queryset = queryset.select_related( queryset = queryset.select_related(
'build', 'bom_item', 'build',
'bom_item',
'bom_item__part',
'bom_item__part__pricing_data',
'bom_item__sub_part',
'bom_item__sub_part__pricing_data',
) )
# Pre-fetch related fields # Pre-fetch related fields
queryset = queryset.prefetch_related( queryset = queryset.prefetch_related(
'bom_item__sub_part', 'bom_item__sub_part__tags',
'bom_item__sub_part__stock_items', 'bom_item__sub_part__stock_items',
'bom_item__sub_part__stock_items__allocations', 'bom_item__sub_part__stock_items__allocations',
'bom_item__sub_part__stock_items__sales_order_allocations', 'bom_item__sub_part__stock_items__sales_order_allocations',
'bom_item__sub_part__tags',
'bom_item__substitutes', 'bom_item__substitutes',
'bom_item__substitutes__part__stock_items', 'bom_item__substitutes__part__stock_items',
@ -1310,6 +1359,11 @@ class BuildLineSerializer(DataImportExportSerializerMixin, InvenTreeModelSeriali
'allocations__stock_item__part', 'allocations__stock_item__part',
'allocations__stock_item__location', 'allocations__stock_item__location',
'allocations__stock_item__location__tags', 'allocations__stock_item__location__tags',
'allocations__stock_item__supplier_part',
'allocations__stock_item__supplier_part__part',
'allocations__stock_item__supplier_part__supplier',
'allocations__stock_item__supplier_part__manufacturer_part',
'allocations__stock_item__supplier_part__manufacturer_part__manufacturer',
) )
# Annotate the "allocated" quantity # Annotate the "allocated" quantity

View File

@ -9,7 +9,8 @@ class BuildStatus(StatusCode):
"""Build status codes.""" """Build status codes."""
PENDING = 10, _('Pending'), 'secondary' # Build is pending / active PENDING = 10, _('Pending'), 'secondary' # Build is pending / active
PRODUCTION = 20, _('Production'), 'primary' # BuildOrder is in production PRODUCTION = 20, _('Production'), 'primary' # Build is in production
ON_HOLD = 25, _('On Hold'), 'warning' # Build is on hold
CANCELLED = 30, _('Cancelled'), 'danger' # Build was cancelled CANCELLED = 30, _('Cancelled'), 'danger' # Build was cancelled
COMPLETE = 40, _('Complete'), 'success' # Build is complete COMPLETE = 40, _('Complete'), 'success' # Build is complete
@ -17,4 +18,8 @@ class BuildStatus(StatusCode):
class BuildStatusGroups: class BuildStatusGroups:
"""Groups for BuildStatus codes.""" """Groups for BuildStatus codes."""
ACTIVE_CODES = [BuildStatus.PENDING.value, BuildStatus.PRODUCTION.value] ACTIVE_CODES = [
BuildStatus.PENDING.value,
BuildStatus.ON_HOLD.value,
BuildStatus.PRODUCTION.value,
]
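Because ON_HOLD is now counted as an active state, existing "active builds" queries pick up held orders automatically; a minimal sketch:

from build.models import Build
from build.status_codes import BuildStatusGroups

# Includes PENDING, ON_HOLD and PRODUCTION orders after this change
active_builds = Build.objects.filter(status__in=BuildStatusGroups.ACTIVE_CODES)
print(active_builds.count())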

View File

@ -69,22 +69,30 @@ src="{% static 'img/blank_image.png' %}"
</button> </button>
<ul class='dropdown-menu' role='menu'> <ul class='dropdown-menu' role='menu'>
<li><a class='dropdown-item' href='#' id='build-edit'><span class='fas fa-edit icon-green'></span> {% trans "Edit Build" %}</a></li> <li><a class='dropdown-item' href='#' id='build-edit'><span class='fas fa-edit icon-green'></span> {% trans "Edit Build" %}</a></li>
{% if build.is_active %}
<li><a class='dropdown-item' href='#' id='build-cancel'><span class='fas fa-times-circle icon-red'></span> {% trans "Cancel Build" %}</a></li>
{% endif %}
{% if roles.build.add %} {% if roles.build.add %}
<li><a class='dropdown-item' href='#' id='build-duplicate'><span class='fas fa-clone'></span> {% trans "Duplicate Build" %}</a></li> <li><a class='dropdown-item' href='#' id='build-duplicate'><span class='fas fa-clone'></span> {% trans "Duplicate Build" %}</a></li>
{% endif %} {% endif %}
{% if build.can_hold %}
<li><a class='dropdown-item' href='#' id='build-hold'><span class='fas fa-hand-paper icon-yellow'></span> {% trans "Hold Build" %}</a></li>
{% endif %}
{% if build.is_active %}
<li><a class='dropdown-item' href='#' id='build-cancel'><span class='fas fa-times-circle icon-red'></span> {% trans "Cancel Build" %}</a></li>
{% endif %}
{% if build.status == BuildStatus.CANCELLED and roles.build.delete %} {% if build.status == BuildStatus.CANCELLED and roles.build.delete %}
<li><a class='dropdown-item' href='#' id='build-delete'><span class='fas fa-trash-alt icon-red'></span> {% trans "Delete Build" %}</a> <li><a class='dropdown-item' href='#' id='build-delete'><span class='fas fa-trash-alt icon-red'></span> {% trans "Delete Build" %}</a>
{% endif %} {% endif %}
</ul> </ul>
</div> </div>
{% if build.active %} {% if build.can_issue %}
<button id='build-issue' title='{% trans "Issue Build" %}' class='btn btn-primary'>
<span class='fas fa-paper-plane'></span> {% trans "Issue Build" %}
</button>
{% elif build.active %}
<button id='build-complete' title='{% trans "Complete Build" %}' class='btn btn-success'> <button id='build-complete' title='{% trans "Complete Build" %}' class='btn btn-success'>
<span class='fas fa-check-circle'></span> {% trans "Complete Build" %} <span class='fas fa-check-circle'></span> {% trans "Complete Build" %}
</button> </button>
{% endif %} {% endif %}
{% endif %} {% endif %}
{% endblock actions %} {% endblock actions %}
@ -244,6 +252,31 @@ src="{% static 'img/blank_image.png' %}"
); );
}); });
$('#build-hold').click(function() {
holdOrder(
'{% url "api-build-hold" build.pk %}',
{
reload: true,
}
);
});
$('#build-issue').click(function() {
constructForm('{% url "api-build-issue" build.pk %}', {
method: 'POST',
title: '{% trans "Issue Build Order" %}',
confirm: true,
preFormContent: `
<div class='alert alert-block alert-info'>
{% trans "Issue this Build Order?" %}
</div>
`,
onSuccess: function(response) {
window.location.reload();
}
});
});
$("#build-complete").on('click', function() { $("#build-complete").on('click', function() {
completeBuildOrder({{ build.pk }}); completeBuildOrder({{ build.pk }});
}); });
@ -298,6 +331,12 @@ src="{% static 'img/blank_image.png' %}"
build: {{ build.pk }}, build: {{ build.pk }},
}); });
}); });
{% if build.part.trackable > 0 %}
onPanelLoad("test-statistics", function() {
prepareTestStatisticsTable('build', '{% url "api-test-statistics-by-build" build.pk %}')
});
{% endif %}
{% endif %} {% endif %}
{% endif %} {% endif %}

View File

@ -267,6 +267,21 @@
</div> </div>
</div> </div>
<div class='panel panel-hidden' id='panel-test-statistics'>
<div class='panel-heading'>
<h4>
{% trans "Build test statistics" %}
</h4>
</div>
<div class='panel-content'>
<div id='teststatistics-button-toolbar'>
{% include "filter_list.html" with id="buildteststatistics" %}
</div>
{% include "test_statistics_table.html" with prefix="build-" %}
</div>
</div>
<div class='panel panel-hidden' id='panel-attachments'> <div class='panel panel-hidden' id='panel-attachments'>
<div class='panel-heading'> <div class='panel-heading'>
<div class='d-flex flex-wrap'> <div class='d-flex flex-wrap'>

View File

@ -20,6 +20,10 @@
{% include "sidebar_item.html" with label='consumed' text=text icon="fa-tasks" %} {% include "sidebar_item.html" with label='consumed' text=text icon="fa-tasks" %}
{% trans "Child Build Orders" as text %} {% trans "Child Build Orders" as text %}
{% include "sidebar_item.html" with label='children' text=text icon="fa-sitemap" %} {% include "sidebar_item.html" with label='children' text=text icon="fa-sitemap" %}
{% if build.part.trackable %}
{% trans "Test Statistics" as text %}
{% include "sidebar_item.html" with label='test-statistics' text=text icon="fa-chart-line" %}
{% endif %}
{% trans "Attachments" as text %} {% trans "Attachments" as text %}
{% include "sidebar_item.html" with label='attachments' text=text icon="fa-paperclip" %} {% include "sidebar_item.html" with label='attachments' text=text icon="fa-paperclip" %}
{% trans "Notes" as text %} {% trans "Notes" as text %}

View File

@ -1015,7 +1015,7 @@ class BuildOverallocationTest(BuildAPITest):
'accept_overallocated': 'trim', 'accept_overallocated': 'trim',
}, },
expected_code=201, expected_code=201,
max_query_count=550, # TODO: Come back and refactor this max_query_count=555, # TODO: Come back and refactor this
) )
self.build.refresh_from_db() self.build.refresh_from_db()

View File

@ -15,6 +15,7 @@ import common.models
from common.settings import set_global_setting from common.settings import set_global_setting
import build.tasks import build.tasks
from build.models import Build, BuildItem, BuildLine, generate_next_build_reference from build.models import Build, BuildItem, BuildLine, generate_next_build_reference
from build.status_codes import BuildStatus
from part.models import Part, BomItem, BomItemSubstitute, PartTestTemplate from part.models import Part, BomItem, BomItemSubstitute, PartTestTemplate
from stock.models import StockItem, StockItemTestResult from stock.models import StockItem, StockItemTestResult
from users.models import Owner from users.models import Owner
@ -175,6 +176,7 @@ class BuildTestBase(TestCase):
part=cls.assembly, part=cls.assembly,
quantity=10, quantity=10,
issued_by=get_user_model().objects.get(pk=1), issued_by=get_user_model().objects.get(pk=1),
status=BuildStatus.PENDING,
) )
# Create some BuildLine items we can use later on # Create some BuildLine items we can use later on
@ -321,6 +323,10 @@ class BuildTest(BuildTestBase):
# Build is PENDING # Build is PENDING
self.assertEqual(self.build.status, status.BuildStatus.PENDING) self.assertEqual(self.build.status, status.BuildStatus.PENDING)
self.assertTrue(self.build.is_active)
self.assertTrue(self.build.can_hold)
self.assertTrue(self.build.can_issue)
# Build has two build outputs # Build has two build outputs
self.assertEqual(self.build.output_count, 2) self.assertEqual(self.build.output_count, 2)
@ -470,6 +476,11 @@ class BuildTest(BuildTestBase):
def test_overallocation_and_trim(self): def test_overallocation_and_trim(self):
"""Test overallocation of stock and trim function""" """Test overallocation of stock and trim function"""
self.assertEqual(self.build.status, status.BuildStatus.PENDING)
self.build.issue_build()
self.assertEqual(self.build.status, status.BuildStatus.PRODUCTION)
# Fully allocate tracked stock (not eligible for trimming) # Fully allocate tracked stock (not eligible for trimming)
self.allocate_stock( self.allocate_stock(
self.output_1, self.output_1,
@ -516,6 +527,7 @@ class BuildTest(BuildTestBase):
self.build.complete_build_output(self.output_1, None) self.build.complete_build_output(self.output_1, None)
self.build.complete_build_output(self.output_2, None) self.build.complete_build_output(self.output_2, None)
self.assertTrue(self.build.can_complete) self.assertTrue(self.build.can_complete)
n = StockItem.objects.filter(consumed_by=self.build).count() n = StockItem.objects.filter(consumed_by=self.build).count()
@ -583,6 +595,8 @@ class BuildTest(BuildTestBase):
self.stock_2_1.quantity = 30 self.stock_2_1.quantity = 30
self.stock_2_1.save() self.stock_2_1.save()
self.build.issue_build()
# Allocate non-tracked parts # Allocate non-tracked parts
self.allocate_stock( self.allocate_stock(
None, None,

View File

@ -9,6 +9,7 @@ from django.http.response import HttpResponse
from django.urls import include, path, re_path from django.urls import include, path, re_path
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import csrf_exempt from django.views.decorators.csrf import csrf_exempt
import django_q.models import django_q.models
@ -25,6 +26,7 @@ from rest_framework.views import APIView
import common.models import common.models
import common.serializers import common.serializers
from common.icons import get_icon_packs
from common.settings import get_global_setting from common.settings import get_global_setting
from generic.states.api import AllStatusViews, StatusView from generic.states.api import AllStatusViews, StatusView
from importer.mixins import DataExportViewMixin from importer.mixins import DataExportViewMixin
@ -743,6 +745,18 @@ class AttachmentDetail(RetrieveUpdateDestroyAPI):
return super().destroy(request, *args, **kwargs) return super().destroy(request, *args, **kwargs)
@method_decorator(cache_control(public=True, max_age=86400), name='dispatch')
class IconList(ListAPI):
"""List view for available icon packages."""
serializer_class = common.serializers.IconPackageSerializer
permission_classes = [permissions.AllowAny]
def get_queryset(self):
"""Return a list of all available icon packages."""
return get_icon_packs().values()
settings_api_urls = [ settings_api_urls = [
# User settings # User settings
path( path(
@ -957,6 +971,8 @@ common_api_urls = [
path('', ContentTypeList.as_view(), name='api-contenttype-list'), path('', ContentTypeList.as_view(), name='api-contenttype-list'),
]), ]),
), ),
# Icons
path('icons/', IconList.as_view(), name='api-icon-list'),
] ]
admin_api_urls = [ admin_api_urls = [
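A hedged sketch of consuming the new endpoint; it assumes the common API (and therefore the icon list) is mounted under /api/, giving /api/icons/. The view allows anonymous access and advertises day-long public caching via the cache_control decorator above:

import requests

resp = requests.get('http://localhost:8000/api/icons/', timeout=10)
print(resp.headers.get('Cache-Control'))    # should include public caching with max-age=86400

packs = resp.json()
tabler = next(p for p in packs if p['prefix'] == 'ti')
print(tabler['name'], len(tabler['icons']))  # the unit test later in this diff asserts > 1000 icons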

View File

@ -0,0 +1,114 @@
"""Icon utilities for InvenTree."""
import json
import logging
from dataclasses import dataclass
from pathlib import Path
from typing import TypedDict
from django.core.exceptions import ValidationError
from django.templatetags.static import static
logger = logging.getLogger('inventree')
_icon_packs = None
class Icon(TypedDict):
"""Dict type for an icon.
Attributes:
name: The name of the icon.
category: The category of the icon.
tags: A list of tags for the icon (used for search).
variants: A dictionary of variants for the icon, where the key is the variant name and the value is the variant's unicode hex character.
"""
name: str
category: str
tags: list[str]
variants: dict[str, str]
@dataclass
class IconPack:
"""Dataclass for an icon pack.
Attributes:
name: The name of the icon pack.
prefix: The prefix used for the icon pack.
fonts: A dictionary of different font file formats for the icon pack, where the key is the css format and the value a path to the font file.
icons: A dictionary of icons in the icon pack, where the key is the icon name and the value is a dictionary of the icon's variants.
"""
name: str
prefix: str
fonts: dict[str, str]
icons: dict[str, Icon]
def get_icon_packs():
"""Return a dictionary of available icon packs including their icons."""
global _icon_packs
if _icon_packs is None:
tabler_icons_path = Path(__file__).parent.parent.joinpath(
'InvenTree/static/tabler-icons/icons.json'
)
with open(tabler_icons_path, 'r') as tabler_icons_file:
tabler_icons = json.load(tabler_icons_file)
icon_packs = [
IconPack(
name='Tabler Icons',
prefix='ti',
fonts={
'woff2': static('tabler-icons/tabler-icons.woff2'),
'woff': static('tabler-icons/tabler-icons.woff'),
'truetype': static('tabler-icons/tabler-icons.ttf'),
},
icons=tabler_icons,
)
]
from plugin import registry
for plugin in registry.with_mixin('icon_pack', active=True):
try:
icon_packs.extend(plugin.icon_packs())
except Exception as e:
logger.warning('Error loading icon pack from plugin %s: %s', plugin, e)
_icon_packs = {pack.prefix: pack for pack in icon_packs}
return _icon_packs
def reload_icon_packs():
"""Reload the icon packs."""
global _icon_packs
_icon_packs = None
get_icon_packs()
def validate_icon(icon: str):
"""Validate an icon string in the format pack:name:variant."""
try:
pack, name, variant = icon.split(':')
except ValueError:
raise ValidationError(
f'Invalid icon format: {icon}, expected: pack:name:variant'
)
packs = get_icon_packs()
if pack not in packs:
raise ValidationError(f'Invalid icon pack: {pack}')
if name not in packs[pack].icons:
raise ValidationError(f'Invalid icon name: {name}')
if variant not in packs[pack].icons[name]['variants']:
raise ValidationError(f'Invalid icon variant: {variant}')
return packs[pack], packs[pack].icons[name], variant
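
A rough usage sketch for the helpers above, intended for a Django shell (python manage.py shell). The icon reference 'ti:bookmark:outline' is only an assumed example, not something this diff guarantees to exist.

# Rough sketch: exercising the icon helpers from a Django shell.
from django.core.exceptions import ValidationError

from common.icons import get_icon_packs, validate_icon

packs = get_icon_packs()          # loaded once, then cached in _icon_packs
print(list(packs.keys()))         # pack prefixes, e.g. ['ti'] plus plugin packs

try:
    pack, icon, variant = validate_icon('ti:bookmark:outline')  # assumed example
    print(pack.name, icon['name'], variant)
except ValidationError as exc:
    print('invalid icon reference:', exc)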

View File

@ -1558,6 +1558,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
            'name': _('Part Category Default Icon'),
            'description': _('Part category default icon (empty means no icon)'),
            'default': '',
            'validator': common.validators.validate_icon,
        },
        'PART_PARAMETER_ENFORCE_UNITS': {
            'name': _('Enforce Parameter Units'),
@ -1779,6 +1780,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
            'name': _('Stock Location Default Icon'),
            'description': _('Stock location default icon (empty means no icon)'),
            'default': '',
            'validator': common.validators.validate_icon,
        },
        'STOCK_SHOW_INSTALLED_ITEMS': {
            'name': _('Show Installed Stock Items'),

View File

@ -565,3 +565,21 @@ class AttachmentSerializer(InvenTreeModelSerializer):
        )
        return super().save()
class IconSerializer(serializers.Serializer):
"""Serializer for an icon."""
name = serializers.CharField()
category = serializers.CharField()
tags = serializers.ListField(child=serializers.CharField())
variants = serializers.DictField(child=serializers.CharField())
class IconPackageSerializer(serializers.Serializer):
"""Serializer for a list of icons."""
name = serializers.CharField()
prefix = serializers.CharField()
fonts = serializers.DictField(child=serializers.CharField())
icons = serializers.DictField(child=IconSerializer())
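
A small sketch of how these serializers build the API payload, mirroring what the IconList view does with the same data; run from a Django shell.

# Sketch: serialize the cached icon packs the same way the IconList view does.
from common.icons import get_icon_packs
from common.serializers import IconPackageSerializer

data = IconPackageSerializer(get_icon_packs().values(), many=True).data

first = data[0]
print(first['name'], first['prefix'])
print(sorted(first['fonts']))   # for the Tabler pack: ['truetype', 'woff', 'woff2']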

View File

@ -20,6 +20,7 @@ from django.urls import reverse
import PIL import PIL
import common.validators
from common.settings import get_global_setting, set_global_setting
from InvenTree.helpers import str2bool
from InvenTree.unit_test import InvenTreeAPITestCase, InvenTreeTestCase, PluginMixin
@ -1524,3 +1525,44 @@ class ContentTypeAPITest(InvenTreeAPITestCase):
            reverse('api-contenttype-detail-modelname', kwargs={'model': None}),
            expected_code=404,
        )
class IconAPITest(InvenTreeAPITestCase):
"""Unit tests for the Icons API."""
def test_list(self):
"""Test API list functionality."""
response = self.get(reverse('api-icon-list'), expected_code=200)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['prefix'], 'ti')
self.assertEqual(response.data[0]['name'], 'Tabler Icons')
for font_format in ['woff2', 'woff', 'truetype']:
self.assertIn(font_format, response.data[0]['fonts'])
self.assertGreater(len(response.data[0]['icons']), 1000)
class ValidatorsTest(TestCase):
"""Unit tests for the custom validators."""
def test_validate_icon(self):
"""Test the validate_icon function."""
common.validators.validate_icon('')
common.validators.validate_icon(None)
with self.assertRaises(ValidationError):
common.validators.validate_icon('invalid')
with self.assertRaises(ValidationError):
common.validators.validate_icon('my:package:non-existing')
with self.assertRaises(ValidationError):
common.validators.validate_icon(
'ti:my-non-existing-icon:non-existing-variant'
)
with self.assertRaises(ValidationError):
common.validators.validate_icon('ti:package:non-existing-variant')
common.validators.validate_icon('ti:package:outline')

View File

@ -1,10 +1,12 @@
"""Validation helpers for common models.""" """Validation helpers for common models."""
import re import re
from typing import Union
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
import common.icons
from common.settings import get_global_setting from common.settings import get_global_setting
@ -103,3 +105,11 @@ def validate_email_domains(setting):
            raise ValidationError(_('An empty domain is not allowed.'))
        if not re.match(r'^@[a-zA-Z0-9\.\-_]+$', domain):
            raise ValidationError(_(f'Invalid domain name: {domain}'))
def validate_icon(name: Union[str, None]):
"""Validate the provided icon name, and ignore if empty."""
if name == '' or name is None:
return
common.icons.validate_icon(name)
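
Sketch of the wrapper's behaviour, matching the cases exercised by the unit tests: empty values mean "no icon" and pass, anything else is delegated to the strict common.icons.validate_icon.

# Sketch: empty values are accepted, malformed references are rejected.
from django.core.exceptions import ValidationError

from common.validators import validate_icon

validate_icon(None)    # allowed - no icon configured
validate_icon('')      # allowed - no icon configured

try:
    validate_icon('invalid')   # not in pack:name:variant form
except ValidationError as exc:
    print('rejected:', exc)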

View File

@ -381,9 +381,14 @@ class SupplierPartSerializer(
            self.fields.pop('manufacturer_detail', None)
            self.fields.pop('manufacturer_part_detail', None)

-        if prettify is not True:
+        if brief or prettify is not True:
            self.fields.pop('pretty_name', None)
if brief:
self.fields.pop('tags')
self.fields.pop('available')
self.fields.pop('availability_updated')
    # Annotated field showing total in-stock quantity
    in_stock = serializers.FloatField(read_only=True, label=_('In Stock'))
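
A hedged sketch of the effect of this change, assuming the brief flag is passed as a constructor kwarg like the serializer's other detail flags (the kwarg name is inferred from the local variable above and is not confirmed by this hunk).

# Sketch only: compare the field sets of the full and 'brief' serializer forms.
from company.serializers import SupplierPartSerializer

full = SupplierPartSerializer()
brief = SupplierPartSerializer(brief=True)   # assumed kwarg name

dropped = set(full.fields) - set(brief.fields)
print(sorted(dropped))
# expected to include 'available', 'availability_updated' and 'tags'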

View File

@ -16,11 +16,26 @@ class BaseEnum(enum.IntEnum):
        obj._value_ = args[0]
        return obj
def __int__(self):
"""Return an integer representation of the value."""
return self.value
def __str__(self):
"""Return a string representation of the value."""
return str(self.value)
    def __eq__(self, obj):
        """Override equality operator to allow comparison with int."""
-        if type(self) is type(obj):
-            return super().__eq__(obj)
-
-        return self.value == obj
+        if type(obj) is int:
+            return self.value == obj
+
+        if isinstance(obj, BaseEnum):
+            return self.value == obj.value
+
+        if hasattr(obj, 'value'):
+            return self.value == obj.value
+
+        return super().__eq__(obj)

    def __ne__(self, obj):
        """Override inequality operator to allow comparison with int."""

View File

@ -47,6 +47,8 @@ class DataImporterModelList(APIView):
class DataImportSessionList(BulkDeleteMixin, ListCreateAPI):
    """API endpoint for accessing a list of DataImportSession objects."""

    permission_classes = [permissions.IsAuthenticated]

    queryset = importer.models.DataImportSession.objects.all()
    serializer_class = importer.serializers.DataImportSessionSerializer
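
With IsAuthenticated in place, anonymous requests to the session list should now be rejected. A sketch using DRF's test client; the endpoint path is an assumption for illustration.

# Sketch: anonymous access to the import session list should be refused.
from rest_framework.test import APIClient

client = APIClient()
response = client.get('/api/importer/session/')  # assumed endpoint path
print(response.status_code)  # expect 401 or 403 for an anonymous user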

(The remaining file diffs in this commit were suppressed because they are too large to display, and some changed files were not shown at all.)