Merge branch 'master' of https://github.com/inventree/InvenTree into add-changelog

This commit is contained in:
Matthias Mair 2024-04-03 19:00:59 +02:00
commit 4e55838fa9
No known key found for this signature in database
GPG Key ID: A593429DDA23B66A
3465 changed files with 514163 additions and 512350 deletions

View File

@ -1,33 +0,0 @@
version = 1
exclude_patterns = [
"docs/docs/javascripts/**", # Docs: Helpers
"docs/ci/**", # Docs: CI
"InvenTree/InvenTree/static/**", # Backend: CUI static files
"ci/**", # Backend: CI
"InvenTree/**/migrations/*.py", # Backend: Migration files
"src/frontend/src/locales/**", # Frontend: Translations
]
test_patterns = ["**/test_*.py", "**/test.py", "**/tests.py"]
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"
[[analyzers]]
name = "docker"
[[analyzers]]
name = "test-coverage"
enabled = false

View File

@ -16,6 +16,7 @@ services:
inventree:
build:
context: ..
dockerfile: ../InvenTree/contrib/container/Dockerfile
target: dev
args:
base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"

3
.git-blame-ignore-revs Normal file
View File

@ -0,0 +1,3 @@
# .git-blame-ignore-revs
# Code Structure refactor https://github.com/inventree/InvenTree/pull/5582
0bace3f3afaa213c63b5dcc70103f0d232637a9a

11
.github/CODEOWNERS vendored
View File

@ -1,2 +1,13 @@
# General owner is the maintainers team
* @SchrodingersGat
# plugins are co-owned
/src/backend/InvenTree/plugin/ @SchrodingersGat @matmair
/src/backend/InvenTree/plugins/ @SchrodingersGat @matmair
# Installer functions
.pkgr.yml @matmair
Procfile @matmair
runtime.txt @matmair
/contrib/installer @matmair
/contrib/packager.io @matmair

View File

@ -43,7 +43,9 @@ body:
label: "Deployment Method"
options:
- label: "Docker"
- label: "Package"
- label: "Bare metal"
- label: "Other - added info in Steps to Reproduce"
- type: textarea
id: version-info
validations:

View File

@ -11,7 +11,7 @@ runs:
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
python3 ./src/backend/InvenTree/manage.py flush --noinput
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json

View File

@ -65,10 +65,11 @@ runs:
with:
node-version: ${{ env.node_version }}
cache: 'npm'
cache-dependency-path: src/backend/package-lock.json
- name: Install npm packages
if: ${{ inputs.npm == 'true' }}
shell: bash
run: npm install
run: cd src/backend && npm install
# OS installs
- name: Install OS Dependencies
@ -77,12 +78,13 @@ runs:
run: |
sudo apt-get update
sudo apt-get install ${{ inputs.apt-dependency }}
sudo apt-get install ${{ inputs.apt-dependency }}
# Invoke commands
- name: Install dev requirements
if: ${{ inputs.dev-install == 'true' ||inputs.install == 'true' }}
shell: bash
run: uv pip install -r requirements-dev.txt
run: uv pip install -r src/backend/requirements-dev.txt
- name: Run invoke install
if: ${{ inputs.install == 'true' }}
shell: bash

View File

@ -3,34 +3,34 @@ updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: daily
interval: weekly
- package-ecosystem: docker
directory: /
directory: /contrib/container
schedule:
interval: daily
interval: weekly
- package-ecosystem: pip
directory: /docker
directory: /contrib/container
schedule:
interval: daily
interval: weekly
- package-ecosystem: pip
directory: /docs
schedule:
interval: daily
interval: weekly
- package-ecosystem: npm
directory: /
directory: /src/backend
schedule:
interval: daily
interval: weekly
- package-ecosystem: pip
directory: /
directory: /src/backend
schedule:
interval: daily
interval: weekly
- package-ecosystem: npm
directory: /src/frontend
schedule:
interval: daily
interval: weekly

220
.github/scripts/version_check.py vendored Normal file
View File

@ -0,0 +1,220 @@
"""Ensure that the release tag matches the InvenTree version number.
Behaviour:
master / main branch:
- version number must end with 'dev'
tagged branch:
- version number must match tag being built
- version number cannot already exist as a release tag
"""
import json
import os
import re
import sys
from pathlib import Path
import requests
def get_existing_release_tags():
    """Request information on existing releases via the GitHub API.

    Returns:
        A list of [major, minor, patch] integer triples, one entry per
        published release whose tag matches the expected version pattern.

    Raises:
        ValueError: If the GitHub API does not respond with HTTP 200.
    """
    # Use an authorization token if one is provided via the environment
    token = os.getenv('GITHUB_TOKEN', None)

    headers = None

    if token:
        headers = {'Authorization': f'Bearer {token}'}

    response = requests.get(
        'https://api.github.com/repos/inventree/inventree/releases', headers=headers
    )

    if response.status_code != 200:
        raise ValueError(
            f'Unexpected status code from GitHub API: {response.status_code}'
        )

    data = json.loads(response.text)

    # Return a list of all tags
    tags = []

    for release in data:
        tag = release['tag_name'].strip()
        match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', tag)

        # Fix: guard against a non-matching tag. Previously 'match.groups()'
        # was called unconditionally, raising AttributeError (match is None)
        # instead of printing the warning and skipping the tag.
        if not match or len(match.groups()) != 3:
            print(f"Version '{tag}' did not match expected pattern")
            continue

        tags.append([int(x) for x in match.groups()])

    return tags
def check_version_number(version_string, allow_duplicate=False):
    """Check the provided version number.

    Returns True if the provided version is the 'newest' InvenTree release
    """
    print(f"Checking version '{version_string}'")

    # A valid version string is 'x.y.z', optionally followed by a ' dev' suffix
    result = re.match(r'^(\d+)\.(\d+)\.(\d+)(?: dev)?$', version_string)

    if not result or len(result.groups()) != 3:
        raise ValueError(
            f"Version string '{version_string}' did not match required pattern"
        )

    candidate = [int(part) for part in result.groups()]

    # Assume that this is the highest release, unless told otherwise
    newest = True

    # Compare against every previously published release tag
    for published in get_existing_release_tags():
        if published == candidate and not allow_duplicate:
            raise ValueError(f"Duplicate release '{version_string}' exists!")

        if published > candidate:
            newest = False
            print(f'Found newer release: {str(published)}')

    return newest
if __name__ == '__main__':
    if 'only_version' in sys.argv:
        # Print the current API version (consumed by CI schema workflows)
        here = Path(__file__).parent.absolute()
        version_file = here.joinpath(
            '..', '..', 'src', 'backend', 'InvenTree', 'InvenTree', 'api_version.py'
        )
        text = version_file.read_text()
        results = re.findall(r"""INVENTREE_API_VERSION = (.*)""", text)
        print(results[0])
        # Idiom fix: use sys.exit (exit() is intended for interactive use)
        sys.exit(0)

    if 'do_release' in sys.argv:
        # Compute old/new version and branch names for a release, and write
        # them to the GitHub Actions output file
        target = os.getenv('TARGET')
        # Fix: renamed 'highest_tupple' -> 'highest_tuple' (typo)
        # NOTE(review): assumes the first entry returned by the API is the
        # most recent release - confirm GitHub ordering guarantees this
        highest_tuple = get_existing_release_tags()[0]
        old_version = f'{highest_tuple[0]}.{highest_tuple[1]}.{highest_tuple[2]}'
        if target == 'master':
            # Master releases bump the minor version
            tag = f'{highest_tuple[0]}.{highest_tuple[1] + 1}.0'
            old_branch = 'master'
        elif target == 'stable':
            # Stable releases bump the patch version
            tag = f'{highest_tuple[0]}.{highest_tuple[1]}.{highest_tuple[2] + 1}'
            old_branch = f'{highest_tuple[0]}.{highest_tuple[1]}.x'
        else:
            raise ValueError(f"Unknown target '{target}'")
        new_tag = f'{tag} dev'

        with open(os.getenv('GITHUB_OUTPUT'), 'a') as env_file:
            env_file.write(f'old_version={old_version}\n')
            env_file.write(f'old_branch={old_branch}\n')
            env_file.write(f'tag={tag}\n')
            env_file.write(f'new_tag={new_tag}\n')

        sys.exit(0)

    # GITHUB_REF_TYPE may be either 'branch' or 'tag'
    GITHUB_REF_TYPE = os.environ['GITHUB_REF_TYPE']

    # GITHUB_REF may be either 'refs/heads/<branch>' or 'refs/tags/<tag>'
    GITHUB_REF = os.environ['GITHUB_REF']
    GITHUB_REF_NAME = os.environ['GITHUB_REF_NAME']
    GITHUB_BASE_REF = os.environ['GITHUB_BASE_REF']

    # Print out version information, makes debugging actions *much* easier!
    print(f'GITHUB_REF: {GITHUB_REF}')
    print(f'GITHUB_REF_NAME: {GITHUB_REF_NAME}')
    print(f'GITHUB_REF_TYPE: {GITHUB_REF_TYPE}')
    print(f'GITHUB_BASE_REF: {GITHUB_BASE_REF}')

    here = Path(__file__).parent.absolute()
    version_file = here.joinpath(
        '..', '..', 'src', 'backend', 'InvenTree', 'InvenTree', 'version.py'
    )

    version = None

    with open(version_file, 'r') as f:
        text = f.read()

        # Extract the InvenTree software version
        results = re.findall(r"""INVENTREE_SW_VERSION = '(.*)'""", text)

        if len(results) != 1:
            print(f'Could not find INVENTREE_SW_VERSION in {version_file}')
            sys.exit(1)

        version = results[0]

    print(f"InvenTree Version: '{version}'")

    # Check version number and look for existing versions
    # If a release is found which matches the current tag, throw an error
    allow_duplicate = False

    # Note: on a 'tag' (release) we *must* allow duplicate versions, as this *is* the version that has just been released
    if GITHUB_REF_TYPE == 'tag':
        allow_duplicate = True

    # Note: on a push to 'stable' branch we also allow duplicates
    if GITHUB_BASE_REF == 'stable':
        allow_duplicate = True

    highest_release = check_version_number(version, allow_duplicate=allow_duplicate)

    # Determine which docker tag we are going to use
    docker_tags = None

    if GITHUB_REF_TYPE == 'tag':
        # GITHUB_REF should be of the form refs/tags/<tag>
        version_tag = GITHUB_REF.split('/')[-1]
        print(f"Checking requirements for tagged release - '{version_tag}':")

        if version_tag != version:
            print(f"Version number '{version}' does not match tag '{version_tag}'")
            # Fix: 'sys.exit' was missing the call parentheses, so a version
            # mismatch did not actually terminate the script with an error
            sys.exit(1)

        if highest_release:
            docker_tags = [version_tag, 'stable']
        else:
            docker_tags = [version_tag]

    elif GITHUB_REF_TYPE == 'branch':
        # Otherwise we know we are targeting the 'master' branch
        docker_tags = ['latest']

    else:
        print('Unsupported branch / version combination:')
        print(f'InvenTree Version: {version}')
        print('GITHUB_REF_TYPE:', GITHUB_REF_TYPE)
        print('GITHUB_BASE_REF:', GITHUB_BASE_REF)
        print('GITHUB_REF:', GITHUB_REF)
        sys.exit(1)

    if docker_tags is None:
        print('Docker tags could not be determined')
        sys.exit(1)

    print(f"Version check passed for '{version}'!")
    print(f"Docker tags: '{docker_tags}'")

    # Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
    with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
        # Construct tag string
        tags = ','.join([f'inventree/inventree:{tag}' for tag in docker_tags])

        env_file.write(f'docker_tags={tags}\n')

        if GITHUB_REF_TYPE == 'tag' and highest_release:
            env_file.write('stable_release=true\n')

View File

@ -16,6 +16,8 @@ jobs:
backport:
name: Backport PR
runs-on: ubuntu-latest
permissions:
contents: write
if: |
github.event.pull_request.merged == true
&& contains(github.event.pull_request.labels.*.name, 'backport')

View File

@ -27,6 +27,7 @@ jobs:
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
INVENTREE_SITE_URL: http://localhost:8000
steps:
- name: Checkout Code
@ -39,4 +40,4 @@ jobs:
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
run: python3 .github/scripts/check_migration_files.py

View File

@ -76,28 +76,28 @@ jobs:
python-version: ${{ env.python_version }}
- name: Version Check
run: |
pip install requests
pip install pyyaml
python3 ci/version_check.py
pip install requests==2.31.0
pip install pyyaml==6.0.1
python3 .github/scripts/version_check.py
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
- name: Test Docker Image
id: test-docker
run: |
docker build . --target production --tag inventree-test
docker build . --target production --tag inventree-test -f contrib/container/Dockerfile
docker run --rm inventree-test invoke --version
docker run --rm inventree-test invoke --list
docker run --rm inventree-test gunicorn --version
docker run --rm inventree-test pg_dump --version
- name: Build Docker Image
# Build the development docker image (using docker-compose.yml)
run: docker-compose build --no-cache
run: docker compose --project-directory . -f contrib/container/dev-docker-compose.yml build --no-cache
- name: Update Docker Image
run: |
docker-compose run inventree-dev-server invoke update
docker-compose run inventree-dev-server invoke setup-dev
docker-compose up -d
docker-compose run inventree-dev-server invoke wait
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke update
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke setup-dev
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml up -d
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke wait
- name: Check Data Directory
# The following file structure should have been created by the docker image
run: |
@ -112,10 +112,10 @@ jobs:
test -f data/secret_key.txt
- name: Run Unit Tests
run: |
echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> docker.dev.env
docker-compose run inventree-dev-server invoke test --disable-pty
docker-compose run inventree-dev-server invoke test --migrations --disable-pty
docker-compose down
echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> contrib/container/docker.dev.env
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke test --disable-pty
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke test --migrations --disable-pty
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml down
- name: Clean up test folder
run: |
rm -rf InvenTree/_testfolder
@ -166,6 +166,7 @@ jobs:
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # pin@v5.3.0
with:
context: .
file: ./contrib/container/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
sbom: true

View File

@ -42,9 +42,9 @@ jobs:
with:
filters: |
server:
- 'InvenTree/**'
- 'requirements.txt'
- 'requirements-dev.txt'
- 'src/backend/InvenTree/**'
- 'src/backend/requirements.txt'
- 'src/backend/requirements-dev.txt'
migrations:
- '**/migrations/**'
- '.github/workflows**'
@ -68,12 +68,12 @@ jobs:
install: true
- name: Check Templated JS Files
run: |
cd ci
cd .github/scripts
python3 check_js_templates.py
- name: Lint Javascript Files
run: |
python InvenTree/manage.py prerender
npx eslint InvenTree/InvenTree/static_i18n/i18n/*.js
python src/backend/InvenTree/manage.py prerender
npx eslint src/backend/InvenTree/InvenTree/static_i18n/i18n/*.js
pre-commit:
name: Style [pre-commit]
@ -92,8 +92,8 @@ jobs:
uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # pin@v3.0.1
- name: Check Version
run: |
pip install requests
python3 ci/version_check.py
pip install requests==2.31.0
python3 .github/scripts/version_check.py
mkdocs:
name: Style [Documentation]
@ -110,7 +110,7 @@ jobs:
python-version: ${{ env.python_version }}
- name: Check Config
run: |
pip install pyyaml
pip install pyyaml==6.0.1
pip install -r docs/requirements.txt
python docs/ci/check_mkdocs_config.py
- name: Check Links
@ -147,17 +147,17 @@ jobs:
dev-install: true
update: true
- name: Export API Documentation
run: invoke schema --ignore-warnings --filename InvenTree/schema.yml
run: invoke schema --ignore-warnings --filename src/backend/InvenTree/schema.yml
- name: Upload schema
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
with:
name: schema.yml
path: InvenTree/schema.yml
path: src/backend/InvenTree/schema.yml
- name: Download public schema
if: needs.paths-filter.outputs.api == 'false'
run: |
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
pip install requests==2.31.0 >/dev/null 2>&1
version="$(python3 .github/scripts/version_check.py only_version 2>&1)"
echo "Version: $version"
url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
echo "URL: $url"
@ -166,8 +166,8 @@ jobs:
- name: Check for differences in API Schema
if: needs.paths-filter.outputs.api == 'false'
run: |
diff --color -u InvenTree/schema.yml api.yaml
diff -u InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
diff --color -u src/backend/InvenTree/schema.yml api.yaml
diff -u src/backend/InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
- name: Check schema - including warnings
run: invoke schema
continue-on-error: true
@ -175,8 +175,8 @@ jobs:
id: version
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
run: |
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
pip install requests==2.31.0 >/dev/null 2>&1
version="$(python3 .github/scripts/version_check.py only_version 2>&1)"
echo "Version: $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
@ -273,13 +273,15 @@ jobs:
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
run: python3 .github/scripts/check_migration_files.py
- name: Coverage Tests
run: invoke test --coverage
- name: Upload Coverage Report
uses: coverallsapp/github-action@3dfc5567390f6fa9267c0ee9c251e4c8c3f18949 # pin@v2.2.3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
flag-name: backend
parallel: true
postgres:
name: Tests - DB [PostgreSQL]
@ -471,6 +473,7 @@ jobs:
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
INVENTREE_DEBUG: True
INVENTREE_PLUGINS_ENABLED: false
VITE_COVERAGE: true
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
@ -487,13 +490,31 @@ jobs:
- name: Install Playwright Browsers
run: cd src/frontend && npx playwright install --with-deps
- name: Run Playwright tests
run: cd src/frontend && npx playwright test
run: cd src/frontend && npx nyc playwright test
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
name: Upload playwright report
if: always()
with:
name: playwright-report
path: src/frontend/playwright-report/
retention-days: 30
- name: Report coverage
if: always()
run: cd src/frontend && npx nyc report --report-dir ./coverage --temp-dir .nyc_output --reporter=lcov --exclude-after-remap false
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
name: Upload coverage report
if: always()
with:
name: coverage
path: src/frontend/coverage/
retention-days: 30
- name: Upload Coverage Report
if: always()
uses: coverallsapp/github-action@3dfc5567390f6fa9267c0ee9c251e4c8c3f18949 # pin@v2.2.3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
flag-name: pui
parallel: true
platform_ui_build:
name: Build - UI Platform
@ -512,9 +533,23 @@ jobs:
run: cd src/frontend && npm run compile && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static
cd src/backend/InvenTree/web/static
zip -r frontend-build.zip web/
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
with:
name: frontend-build
path: InvenTree/web/static/web
path: src/backend/InvenTree/web/static/web
finish_coverage:
name: Finish Coverage
runs-on: ubuntu-20.04
needs: ["platform_ui", "coverage", "paths-filter"]
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.frontend == 'true'
steps:
- name: Finish coverage reporting
uses: coverallsapp/github-action@3dfc5567390f6fa9267c0ee9c251e4c8c3f18949 # pin@v2.2.3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
carryforward: "pui,backend"
parallel-finished: true

View File

@ -19,8 +19,8 @@ jobs:
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Version Check
run: |
pip install requests
python3 ci/version_check.py
pip install requests==2.31.0
python3 .github/scripts/version_check.py
- name: Push to Stable Branch
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
if: env.stable_release == 'true'
@ -43,12 +43,12 @@ jobs:
run: cd src/frontend && npm run compile && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static/web
cd src/backend/InvenTree/web/static/web
zip -r ../frontend-build.zip *
- uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: InvenTree/web/static/frontend-build.zip
file: src/backend/InvenTree/web/static/frontend-build.zip
asset_name: frontend-build.zip
tag: ${{ github.ref }}
overwrite: true

View File

@ -54,7 +54,7 @@ jobs:
# For private repositories:
# - `publish_results` will always be set to `false`, regardless
# of the value entered here.
publish_results: false
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.

View File

@ -10,12 +10,14 @@ env:
node_version: 18
permissions:
contents: write
contents: read
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

5
.gitignore vendored
View File

@ -84,7 +84,7 @@ data/
env/
# Locale stats file
locale_stats.json
src/backend/InvenTree/InvenTree/locale_stats.json
# node.js
node_modules/
@ -93,7 +93,7 @@ node_modules/
maintenance_mode_state.txt
# plugin dev directory
InvenTree/plugins/
src/backend/InvenTree/plugins/
# Compiled translation files
*.mo
@ -103,6 +103,7 @@ messages.ts
api.yaml
# web frontend (static files)
src/backend/InvenTree/web/static
InvenTree/web/static
# Generated docs files

View File

@ -2,12 +2,11 @@
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
InvenTree/InvenTree/static/.*|
InvenTree/locale/.*|
src/frontend/src/locales/.*|
src/backend/InvenTree/InvenTree/static/.*|
src/backend/InvenTree/locale/.*|
src/frontend/src/locales/.* |
.*/migrations/.* |
src/frontend/yarn.lock |
yarn.lock
src/frontend/yarn.lock
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
@ -32,12 +31,12 @@ repos:
hooks:
- id: pip-compile
name: pip-compile requirements-dev.in
args: [requirements-dev.in, -o, requirements-dev.txt, --python-version=3.9, --no-strip-extras]
files: ^requirements-dev\.(in|txt)$
args: [src/backend/requirements-dev.in, -o, src/backend/requirements-dev.txt, --python-version=3.9, --no-strip-extras]
files: src/backend/requirements-dev\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [requirements.in, -o, requirements.txt,--python-version=3.9, --no-strip-extras]
files: ^requirements\.(in|txt)$
args: [src/backend/requirements.in, -o, src/backend/requirements.txt,--python-version=3.9, --no-strip-extras]
files: src/backend/requirements\.(in|txt)$
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.1
hooks:

4
.vscode/launch.json vendored
View File

@ -8,7 +8,7 @@
"name": "InvenTree Server",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/InvenTree/manage.py",
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": true
@ -17,7 +17,7 @@
"name": "InvenTree Server - 3rd party",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/InvenTree/manage.py",
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": false

View File

@ -3,4 +3,48 @@
Hi there, thank you for your interest in contributing!
Please read our contribution guidelines, before submitting your first pull request to the InvenTree codebase.
### Project File Structure
The InvenTree project is split into two main components: frontend and backend. This source is located in the `src` directory. All other files are used for project management, documentation, and testing.
```bash
InvenTree/
├─ .devops/ # Files for Azure DevOps
├─ .github/ # Files for GitHub
│ ├─ actions/ # Reused actions
│ ├─ ISSUE_TEMPLATE/ # Templates for issues and pull requests
│ ├─ workflows/ # CI/CD flows
│ ├─ scripts/ # CI scripts
├─ .vscode/ # Settings for Visual Code IDE
├─ assets/ # General project assets
├─ contrib/ # Files needed for deployments
│ ├─ container/ # Files related to building container images
│ ├─ installer/ # Files needed to build single-file installer
│ ├─ packager.io/ # Files needed for Debian/Ubuntu packages
├─ docs/ # Directory for documentation / General helper files
│ ├─ ci/ # CI for documentation
│ ├─ docs/ # Source for documentation
├─ src/ # Source for application
│ ├─ backend/ # Directory for backend parts
│ │ ├─ InvenTree/ # Source for backend
│ │ ├─ requirements.txt # Dependencies for backend
│ │ ├─ package.json # Dependencies for backend HTML linting
│ ├─ frontend/ # Directory for frontend parts
│ │ ├─ src/ # Source for frontend
│ │ │ ├─ main.tsx # Entry point for frontend
│ │ ├─ tests/ # Tests for frontend
│ │ ├─ netlify.toml # Settings for frontend previews (Netlify)
│ │ ├─ package.json # Dependencies for frontend
│ │ ├─ playwright.config.ts # Settings for frontend tests
│ │ ├─ tsconfig.json # Settings for frontend compilation
├─ .pkgr.yml # Build definition for Debian/Ubuntu packages
├─ .pre-commit-config.yaml # Code formatter/linter configuration
├─ CONTRIBUTING.md # Contribution guidelines and overview
├─ Procfile # Process definition for Debian/Ubuntu packages
├─ README.md # General project information and overview
├─ runtime.txt # Python runtime settings for Debian/Ubuntu packages build
├─ SECURITY.md # Project security policy
├─ tasks.py # Action definitions for development, testing and deployment
```
Refer to our [contribution guidelines](https://docs.inventree.org/en/latest/develop/contributing/) for more information!

View File

@ -1,166 +0,0 @@
# The InvenTree dockerfile provides two build targets:
#
# production:
# - Required files are copied into the image
# - Runs InvenTree web server under gunicorn
#
# dev:
# - Expects source directories to be loaded as a run-time volume
# - Runs InvenTree web server under django development server
# - Monitors source files for any changes, and live-reloads server

ARG base_image=python:3.11-alpine3.18
FROM ${base_image} as inventree_base

# Build arguments for this image
ARG commit_tag=""
ARG commit_hash=""
ARG commit_date=""

ARG data_dir="data"

ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK 1
ENV INVOKE_RUN_SHELL="/bin/ash"

ENV INVENTREE_DOCKER="true"

# InvenTree paths
ENV INVENTREE_HOME="/home/inventree"
ENV INVENTREE_MNG_DIR="${INVENTREE_HOME}/InvenTree"
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/${data_dir}"
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
ENV INVENTREE_BACKUP_DIR="${INVENTREE_DATA_DIR}/backup"
ENV INVENTREE_PLUGIN_DIR="${INVENTREE_DATA_DIR}/plugins"

# InvenTree configuration files
ENV INVENTREE_CONFIG_FILE="${INVENTREE_DATA_DIR}/config.yaml"
ENV INVENTREE_SECRET_KEY_FILE="${INVENTREE_DATA_DIR}/secret_key.txt"
ENV INVENTREE_PLUGIN_FILE="${INVENTREE_DATA_DIR}/plugins.txt"

# Worker configuration (can be altered by user)
ENV INVENTREE_GUNICORN_WORKERS="4"
ENV INVENTREE_BACKGROUND_WORKERS="4"

# Default web server address:port
ENV INVENTREE_WEB_ADDR=0.0.0.0
ENV INVENTREE_WEB_PORT=8000

# NOTE(review): ${DATE} is never declared via ARG or ENV in this file, so
# the build-date label likely expands to an empty string - confirm intent
LABEL org.label-schema.schema-version="1.0" \
      org.label-schema.build-date=${DATE} \
      org.label-schema.vendor="inventree" \
      org.label-schema.name="inventree/inventree" \
      org.label-schema.url="https://hub.docker.com/r/inventree/inventree" \
      org.label-schema.vcs-url="https://github.com/inventree/InvenTree.git" \
      org.label-schema.vcs-ref=${commit_tag}

# Install required system level packages
RUN apk add --no-cache \
    git gettext py-cryptography \
    # Image format support
    libjpeg libwebp zlib \
    # Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
    py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
    # Postgres client
    postgresql13-client \
    # MySQL / MariaDB client
    mariadb-client mariadb-connector-c \
    && \
    # fonts
    apk --update --upgrade --no-cache add fontconfig ttf-freefont font-noto terminus-font && fc-cache -f

EXPOSE 8000

RUN mkdir -p ${INVENTREE_HOME}
WORKDIR ${INVENTREE_HOME}

# Copy requirements and helper scripts from the build context
COPY ./docker/requirements.txt base_requirements.txt
COPY ./requirements.txt ./
COPY ./docker/install_build_packages.sh .
RUN chmod +x install_build_packages.sh

# For ARMv7 architecture, add the piwheels repo (for cryptography library)
# Otherwise, we have to build from source, which is difficult
# Ref: https://github.com/inventree/InvenTree/pull/4598
RUN if [ `apk --print-arch` = "armv7" ]; then \
    printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
    fi

COPY tasks.py docker/gunicorn.conf.py docker/init.sh ./
RUN chmod +x init.sh

ENTRYPOINT ["/bin/ash", "./init.sh"]

# Intermediate stage: install python dependencies into /root/.local
FROM inventree_base as prebuild

ENV PATH=/root/.local/bin:$PATH

RUN ./install_build_packages.sh --no-cache --virtual .build-deps && \
    pip install --user -r base_requirements.txt -r requirements.txt --no-cache && \
    apk --purge del .build-deps

# Frontend builder image:
FROM prebuild as frontend

RUN apk add --no-cache --update nodejs npm && npm install -g yarn
RUN yarn config set network-timeout 600000 -g
COPY InvenTree ${INVENTREE_HOME}/InvenTree
COPY src ${INVENTREE_HOME}/src
COPY tasks.py ${INVENTREE_HOME}/tasks.py
RUN cd ${INVENTREE_HOME}/InvenTree && inv frontend-compile

# InvenTree production image:
# - Copies required files from local directory
# - Starts a gunicorn webserver
FROM inventree_base as production

ENV INVENTREE_DEBUG=False

# As .git directory is not available in production image, we pass the commit information via ENV
# NOTE(review): commit_hash / commit_date are ARGs of the base stage; ARG
# values do not carry across stage boundaries unless redeclared, so these
# ENV values may expand empty here - verify against the build pipeline
ENV INVENTREE_COMMIT_HASH="${commit_hash}"
ENV INVENTREE_COMMIT_DATE="${commit_date}"

# use dependencies and compiled wheels from the prebuild image
ENV PATH=/root/.local/bin:$PATH
COPY --from=prebuild /root/.local /root/.local

# Copy source code
COPY InvenTree ./InvenTree
COPY --from=frontend ${INVENTREE_HOME}/InvenTree/web/static/web ./InvenTree/web/static/web

# Launch the production server
# TODO: Work out why environment variables cannot be interpolated in this command
# TODO: e.g. -b ${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT} fails here
CMD gunicorn -c ./gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:8000 --chdir ./InvenTree

# Development image target: source code is mounted at runtime
FROM inventree_base as dev

# Vite server (for local frontend development)
EXPOSE 5173

# Install packages required for building python packages
RUN ./install_build_packages.sh

RUN pip install uv --no-cache-dir && pip install -r base_requirements.txt --no-cache

# Install nodejs / npm / yarn
RUN apk add --no-cache --update nodejs npm && npm install -g yarn
RUN yarn config set network-timeout 600000 -g

# The development image requires the source code to be mounted to /home/inventree/
# So from here, we don't actually "do" anything, apart from some file management

ENV INVENTREE_DEBUG=True

# Location for python virtual environment
# If the INVENTREE_PY_ENV variable is set, the entrypoint script will use it!
ENV INVENTREE_PY_ENV="${INVENTREE_DATA_DIR}/env"

WORKDIR ${INVENTREE_HOME}

# Entrypoint ensures that we are running in the python virtual environment
ENTRYPOINT ["/bin/ash", "./docker/init.sh"]

# Launch the development server
CMD ["invoke", "server", "-a", "${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT}"]

View File

@ -1,113 +0,0 @@
"""Admin classes."""
from django.contrib import admin
from django.db.models.fields import CharField
from django.http.request import HttpRequest
from djmoney.contrib.exchange.admin import RateAdmin
from djmoney.contrib.exchange.models import Rate
from import_export.exceptions import ImportExportError
from import_export.resources import ModelResource
class InvenTreeResource(ModelResource):
    """Custom subclass of the ModelResource class provided by django-import-export.

    Ensures that exported data are escaped to prevent malicious formula injection.
    Ref: https://owasp.org/www-community/attacks/CSV_Injection
    """

    # Hard limits on the size of an imported dataset
    MAX_IMPORT_ROWS = 1000
    MAX_IMPORT_COLS = 100

    # List of fields which should be converted to empty strings if they are null
    # NOTE(review): this list is mutated at class level in before_import_row,
    # so discovered field names persist across instances (and any subclass
    # which does not override the attribute) - confirm this is intentional
    CONVERT_NULL_FIELDS = []

    def import_data_inner(
        self,
        dataset,
        dry_run,
        raise_errors,
        using_transactions,
        collect_failed_rows,
        rollback_on_validation_errors=None,
        **kwargs,
    ):
        """Override the default import_data_inner function to provide better error handling.

        Raises:
            ImportExportError: If the dataset exceeds MAX_IMPORT_ROWS or MAX_IMPORT_COLS.
        """
        if len(dataset) > self.MAX_IMPORT_ROWS:
            raise ImportExportError(
                f'Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})'
            )

        if len(dataset.headers) > self.MAX_IMPORT_COLS:
            raise ImportExportError(
                f'Dataset contains too many columns (max {self.MAX_IMPORT_COLS})'
            )

        return super().import_data_inner(
            dataset,
            dry_run,
            raise_errors,
            using_transactions,
            collect_failed_rows,
            rollback_on_validation_errors=rollback_on_validation_errors,
            **kwargs,
        )

    def export_resource(self, obj):
        """Custom function to override default row export behavior.

        Specifically, strip illegal leading characters to prevent formula injection.
        """
        row = super().export_resource(obj)

        # Fix: removed the duplicate '@' entry; a set also gives O(1)
        # membership checks for the per-character loop below
        illegal_start_vals = {'@', '=', '+', '-', '\t', '\r', '\n'}

        for idx, val in enumerate(row):
            # isinstance (rather than 'type(val) is str') also escapes
            # str subclasses, which is the safer behavior here
            if isinstance(val, str):
                val = val.strip()

                # If the value starts with certain 'suspicious' values, remove it!
                while len(val) > 0 and val[0] in illegal_start_vals:
                    # Remove the first character
                    val = val[1:]

                row[idx] = val

        return row

    def get_fields(self, **kwargs):
        """Return fields, with some common exclusions."""
        fields = super().get_fields(**kwargs)

        # Internal tree-management fields are never exported
        fields_to_exclude = ['metadata', 'lft', 'rght', 'tree_id', 'level']

        return [f for f in fields if f.column_name not in fields_to_exclude]

    def before_import_row(self, row, row_number=None, **kwargs):
        """Run custom code before importing each row.

        - Convert any null fields to empty strings, for fields which do not support null values
        """
        # We can automatically determine which fields might need such a conversion
        for field in self.Meta.model._meta.fields:
            if isinstance(field, CharField):
                # A blank-but-not-null CharField cannot store None
                if field.blank and not field.null:
                    if field.name not in self.CONVERT_NULL_FIELDS:
                        self.CONVERT_NULL_FIELDS.append(field.name)

        for field in self.CONVERT_NULL_FIELDS:
            if field in row and row[field] is None:
                row[field] = ''
class CustomRateAdmin(RateAdmin):
    """Admin interface for the Rate class, with manual record creation disabled."""

    def has_add_permission(self, request: HttpRequest) -> bool:
        """Rate objects are managed by the exchange backend - disallow manual adds."""
        return False
# Replace the default django-money Rate admin with the locked-down version above
admin.site.unregister(Rate)
admin.site.register(Rate, CustomRateAdmin)

View File

@ -1,453 +0,0 @@
"""Helper functions for loading InvenTree configuration options."""
import datetime
import json
import logging
import os
import random
import secrets
import shutil
import string
import warnings
from pathlib import Path

from django.core.files.base import ContentFile
from django.core.files.storage import Storage
logger = logging.getLogger('inventree')
CONFIG_DATA = None
CONFIG_LOOKUPS = {}
def to_list(value, delimiter=','):
    """Coerce a configuration setting into a list.

    A value loaded from the .config file may already be a list, whereas the
    same setting supplied via an environment variable arrives as a single
    delimited string - in the latter case, split it apart.
    """
    if type(value) in (list, tuple):
        return value

    # Not already a sequence - treat it as a delimited string
    return [entry.strip() for entry in str(value).split(delimiter)]
def to_dict(value):
    """Coerce a configuration setting into a dict.

    A value loaded from the .config file may already be an object/dict,
    whereas the same setting supplied via an environment variable must be a
    valid JSON string - in the latter case, parse it.
    """
    if isinstance(value, dict):
        return value

    if value is not None:
        try:
            return json.loads(value)
        except Exception as exc:
            logger.exception(
                "Failed to parse value '%s' as JSON with error %s. Ensure value is a valid JSON string.",
                value,
                exc,
            )

    # None input, or unparseable JSON - fall back to an empty dict
    return {}
def is_true(x):
    """Return True if the supplied value "looks" like an affirmative boolean."""
    truthy = {'1', 'y', 'yes', 't', 'true', 'on'}
    return str(x).strip().lower() in truthy
def get_base_dir() -> Path:
    """Return the base (top-level) InvenTree directory.

    This is the directory two levels above this module's own location.
    """
    here = Path(__file__)
    return here.parent.parent.resolve()
def ensure_dir(path: Path, storage=None) -> None:
    """Ensure that a directory exists, creating it if necessary.

    If a django Storage backend is supplied, the directory is created in that
    backend by saving a placeholder '.empty' file; otherwise it is created on
    the local filesystem.
    """
    if storage and isinstance(storage, Storage):
        # Storage backends have no real directories - write a marker file
        if not storage.exists(str(path)):
            storage.save(str(path / '.empty'), ContentFile(''))
        return

    # Local filesystem: create the directory tree if it is missing
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)
def get_config_file(create=True) -> Path:
    """Return the path of the InvenTree configuration file.

    The location may be overridden via the INVENTREE_CONFIG_FILE environment
    variable; otherwise the default 'config.yaml' in the base directory is
    used, and created from the bundled template if missing (when create=True).
    """
    base_dir = get_base_dir()

    env_path = os.getenv('INVENTREE_CONFIG_FILE')

    if env_path:
        config_path = Path(env_path.strip()).resolve()
    else:
        # Config file is *not* specified - use the default
        config_path = base_dir.joinpath('config.yaml').resolve()

    if create and not config_path.exists():
        print(
            "InvenTree configuration file 'config.yaml' not found - creating default file"
        )
        ensure_dir(config_path.parent)

        # Copy the bundled template into place
        template = base_dir.joinpath('config_template.yaml')
        shutil.copyfile(template, config_path)
        print(f'Created config file {config_path}')

    return config_path
def load_config_data(set_cache: bool = False) -> dict:
    """Load configuration data from the config file.

    Arguments:
        set_cache(bool): If True, the configuration data will be cached for future use after load.
    """
    global CONFIG_DATA

    # Serve from the cache, unless we have been asked to refresh it
    if not set_cache and CONFIG_DATA is not None:
        return CONFIG_DATA

    import yaml

    with open(get_config_file(), 'r') as cfg:
        data = yaml.safe_load(cfg)

    # Set the cache if requested
    if set_cache:
        CONFIG_DATA = data

    return data
def do_typecast(value, type, var_name=None):
    """Attempt to typecast a value.

    Arguments:
        value: Value to typecast
        type: Function to use for typecasting the value e.g. int, float, str, list, dict
        var_name: Name that should be logged e.g. 'INVENTREE_STATIC_ROOT'. Set if logging is required.

    Returns:
        Typecasted value or original value if typecasting failed.
    """
    if type is list:
        # Force 'list' of strings
        return to_list(value)

    if type is dict:
        # Valid JSON string is required
        return to_dict(value)

    if type is not None:
        # Try to typecast the value
        try:
            return type(value)
        except Exception as exc:
            if var_name:
                logger.exception(
                    "Failed to typecast '%s' with value '%s' to type '%s' with error %s",
                    var_name,
                    value,
                    type,
                    exc,
                )

    # Typecast failed (or no typecast requested) - return the original value
    return value
def get_setting(env_var=None, config_key=None, default_value=None, typecast=None):
    """Helper function for retrieving a configuration setting value.

    - First preference is to look for the environment variable
    - Second preference is to look for the value of the settings file
    - Third preference is the default value

    Arguments:
        env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
        config_key: Key to lookup in the configuration file
        default_value: Value to return if first two options are not provided
        typecast: Function to use for typecasting the value e.g. int, float, str, list, dict
    """

    def set_metadata(source: str):
        """Set lookup metadata for the setting.

        Records (in the module-level CONFIG_LOOKUPS dict) where this setting
        was resolved from, and when it was last accessed.
        """
        key = env_var or config_key
        CONFIG_LOOKUPS[key] = {
            'env_var': env_var,
            'config_key': config_key,
            'source': source,
            'accessed': datetime.datetime.now(),
        }

    # First, try to load from the environment variables
    if env_var is not None:
        val = os.getenv(env_var, None)

        if val is not None:
            set_metadata('env')
            return do_typecast(val, typecast, var_name=env_var)

    # Next, try to load from configuration file
    if config_key is not None:
        cfg_data = load_config_data()

        result = None

        # Hack to allow 'path traversal' in configuration file
        # e.g. config_key 'database.host' looks up cfg_data['database']['host']
        for key in config_key.strip().split('.'):
            if type(cfg_data) is not dict or key not in cfg_data:
                # Path broken - abandon the lookup
                result = None
                break

            result = cfg_data[key]
            cfg_data = cfg_data[key]

        if result is not None:
            set_metadata('yaml')
            return do_typecast(result, typecast, var_name=env_var)

    # Finally, return the default value
    set_metadata('default')
    return do_typecast(default_value, typecast, var_name=env_var)
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
    """Helper function for retrieving a boolean configuration setting."""
    raw_value = get_setting(env_var, config_key, default_value)
    return is_true(raw_value)
def get_media_dir(create=True):
    """Return the absolute path for the 'media' directory (where uploaded files are stored).

    Raises:
        FileNotFoundError: If the INVENTREE_MEDIA_ROOT setting is not configured.
    """
    media_root = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')

    if not media_root:
        raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified')

    media_root = Path(media_root).resolve()

    if create:
        media_root.mkdir(parents=True, exist_ok=True)

    return media_root
def get_static_dir(create=True):
    """Return the absolute path for the 'static' directory (where static files are stored).

    Raises:
        FileNotFoundError: If the INVENTREE_STATIC_ROOT setting is not configured.
    """
    static_root = get_setting('INVENTREE_STATIC_ROOT', 'static_root')

    if not static_root:
        raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified')

    static_root = Path(static_root).resolve()

    if create:
        static_root.mkdir(parents=True, exist_ok=True)

    return static_root
def get_backup_dir(create=True):
    """Return the absolute path for the backup directory.

    Raises:
        FileNotFoundError: If the INVENTREE_BACKUP_DIR setting is not configured.
    """
    backup_dir = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir')

    if not backup_dir:
        raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified')

    backup_dir = Path(backup_dir).resolve()

    if create:
        backup_dir.mkdir(parents=True, exist_ok=True)

    return backup_dir
def get_plugin_file():
    """Return the path of the InvenTree plugins specification file.

    Note: It will be created if it does not already exist!
    """
    # Check if the plugin.txt file (specifying required plugins) is specified
    pf = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')

    if pf:
        # Make sure we are using a modern Path object
        pf = Path(pf)
    else:
        # If not specified, look in the same directory as the configuration file
        pf = get_config_file().parent.joinpath('plugins.txt')

    if not pf.exists():
        logger.warning(
            'Plugin configuration file does not exist - creating default file'
        )
        logger.info("Creating plugin file at '%s'", pf)
        ensure_dir(pf.parent)

        # If opening the file fails (no write permission, for example), then this will throw an error
        pf.write_text('# InvenTree Plugins (uses PIP framework to install)\n\n')

    return pf
def get_plugin_dir():
    """Return the path of the custom plugins directory (if configured)."""
    plugin_dir = get_setting('INVENTREE_PLUGIN_DIR', 'plugin_dir')
    return plugin_dir
def get_secret_key():
    """Return the secret key value which will be used by django.

    Following options are tested, in descending order of preference:
    A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
    B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
    C) Look for default key file "secret_key.txt"
    D) Create "secret_key.txt" if it does not exist
    """
    # Look for environment variable
    if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
        logger.info('SECRET_KEY loaded by INVENTREE_SECRET_KEY')  # pragma: no cover
        return secret_key

    # Look for secret key file
    if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
        secret_key_file = Path(secret_key_file).resolve()
    else:
        # Default location for secret key file
        secret_key_file = get_base_dir().joinpath('secret_key.txt').resolve()

    if not secret_key_file.exists():
        logger.info("Generating random key file at '%s'", secret_key_file)
        ensure_dir(secret_key_file.parent)

        # Create a random key file.
        # Use the 'secrets' module (CSPRNG) here: the previous implementation
        # used 'random', which is not suitable for security-sensitive values
        # such as the django SECRET_KEY.
        options = string.digits + string.ascii_letters + string.punctuation
        key = ''.join(secrets.choice(options) for _ in range(100))
        secret_key_file.write_text(key)

    logger.debug("Loading SECRET_KEY from '%s'", secret_key_file)

    return secret_key_file.read_text().strip()
def get_custom_file(
    env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False
):
    """Returns the checked path to a custom file.

    Set lookup_media to True to also search in the media folder.

    Arguments:
        env_ref: Environment variable name to look up for the file path
        conf_ref: Configuration file key to look up for the file path
        log_ref: Human-readable name of the file, used in log messages
        lookup_media: If True, also search the 'media' storage

    Returns:
        The configured value if found in static (or media) storage,
        False if configured but not found, or None if not configured at all.
    """
    from django.contrib.staticfiles.storage import StaticFilesStorage
    from django.core.files.storage import default_storage

    value = get_setting(env_ref, conf_ref, None)

    if not value:
        # No custom file configured
        return None

    static_storage = StaticFilesStorage()

    if static_storage.exists(value):
        logger.info('Loading %s from %s directory: %s', log_ref, 'static', value)
    elif lookup_media and default_storage.exists(value):
        logger.info('Loading %s from %s directory: %s', log_ref, 'media', value)
    else:
        # Configured, but not found in any searched storage - warn and flag
        add_dir_str = ' or media' if lookup_media else ''
        logger.warning(
            "The %s file '%s' could not be found in the static %s directories",
            log_ref,
            value,
            add_dir_str,
        )
        value = False

    return value
def get_frontend_settings(debug=True):
    """Return a dictionary of settings for the frontend interface.

    Note that the new config settings use the 'FRONTEND' key,
    whereas the legacy key was 'PUI' (platform UI) which is now deprecated

    Arguments:
        debug: Whether the server runs in debug mode (affects default values below)
    """
    # Legacy settings
    pui_settings = get_setting(
        'INVENTREE_PUI_SETTINGS', 'pui_settings', {}, typecast=dict
    )

    if len(pui_settings) > 0:
        warnings.warn(
            "The 'INVENTREE_PUI_SETTINGS' key is deprecated. Please use 'INVENTREE_FRONTEND_SETTINGS' instead",
            DeprecationWarning,
            stacklevel=2,
        )

    # New settings
    frontend_settings = get_setting(
        'INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict
    )

    # Merge settings (new keys take precedence over legacy keys)
    settings = {**pui_settings, **frontend_settings}

    # Set the base URL
    if 'base_url' not in settings:
        # Legacy key checked first, so a deprecation warning can be emitted
        base_url = get_setting('INVENTREE_PUI_URL_BASE', 'pui_url_base', '')

        if base_url:
            warnings.warn(
                "The 'INVENTREE_PUI_URL_BASE' key is deprecated. Please use 'INVENTREE_FRONTEND_URL_BASE' instead",
                DeprecationWarning,
                stacklevel=2,
            )
        else:
            base_url = get_setting(
                'INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'platform'
            )

        settings['base_url'] = base_url

    # Set the server list
    settings['server_list'] = settings.get('server_list', [])

    # Set the debug flag
    settings['debug'] = debug

    if 'environment' not in settings:
        settings['environment'] = 'development' if debug else 'production'

    if debug and 'show_server_selector' not in settings:
        # In debug mode, show server selector by default
        settings['show_server_selector'] = True
    elif len(settings['server_list']) == 0:
        # If no servers are specified, show server selector
        settings['show_server_selector'] = True

    return settings

View File

@ -1,253 +0,0 @@
"""Helper functions for converting between units."""
import logging
import re
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
import pint
_unit_registry = None
logger = logging.getLogger('inventree')
def get_unit_registry():
    """Return a custom instance of the Pint UnitRegistry.

    The registry is constructed lazily on first access and cached thereafter.
    """
    global _unit_registry

    if _unit_registry is not None:
        # Serve the cached registry for speedier access
        return _unit_registry

    return reload_unit_registry()
def reload_unit_registry():
    """Reload the unit registry from the database.

    This function is called at startup, and whenever the database is updated.

    Returns:
        The newly constructed pint UnitRegistry instance.
    """
    import time

    t_start = time.time()

    global _unit_registry

    # Invalidate the cached registry while it is being rebuilt
    _unit_registry = None

    reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)

    # Aliases for temperature units
    reg.define('@alias degC = Celsius')
    reg.define('@alias degF = Fahrenheit')
    reg.define('@alias degK = Kelvin')

    # Define some "standard" additional units
    reg.define('piece = 1')
    reg.define('each = 1 = ea')
    reg.define('dozen = 12 = dz')
    reg.define('hundred = 100')
    reg.define('thousand = 1000')

    # Allow for custom units to be defined in the database
    try:
        from common.models import CustomUnit

        for cu in CustomUnit.objects.all():
            try:
                reg.define(cu.fmt_string())
            except Exception as e:
                # A single bad custom unit should not break the registry
                logger.exception(
                    'Failed to load custom unit: %s - %s', cu.fmt_string(), e
                )

        # Once custom units are loaded, save registry
        _unit_registry = reg
    except Exception:
        # Database is not ready, or CustomUnit model is not available
        pass

    dt = time.time() - t_start
    logger.debug('Loaded unit registry in %.3f s', dt)

    return reg
def from_engineering_notation(value):
    """Convert a provided value to 'natural' representation from 'engineering' notation.

    Ref: https://en.wikipedia.org/wiki/Engineering_notation

    In "engineering notation", the unit (or SI prefix) is often combined with the value,
    and replaces the decimal point.

    Examples:
    - 1K2 -> 1.2K
    - 3n05 -> 3.05n
    - 8R6 -> 8.6R

    And, we should also take into account any provided trailing strings:
    - 1K2 ohm -> 1.2K ohm
    - 10n005F -> 10.005nF
    """
    value = str(value).strip()

    # Note: this must be a *raw* string - it was previously written as an
    # f-string (with no placeholders), in which '\d' is an invalid escape
    pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'

    if match := re.match(pattern, value):
        left, prefix, right, suffix = match.groups()
        return f'{left}.{right}{prefix}{suffix}'

    # No engineering-notation pattern detected - return the value unchanged
    return value
def convert_value(value, unit):
    """Attempt to convert a value to a specified unit.

    Arguments:
        value: The value to convert
        unit: The target unit to convert to

    Returns:
        The converted value (ideally a pint.Quantity value)

    Raises:
        Exception if the value cannot be converted to the specified unit
    """
    ureg = get_unit_registry()

    # Convert the provided value to a pint.Quantity object
    value = ureg.Quantity(value)

    # Convert to the specified unit
    if unit:
        if is_dimensionless(value):
            # A dimensionless input is re-interpreted as a magnitude in the
            # target unit (e.g. "3" with unit "mm" becomes "3 mm")
            magnitude = value.to_base_units().magnitude
            value = ureg.Quantity(magnitude, unit)
        else:
            # Otherwise, perform an actual unit conversion
            value = value.to(unit)

    return value
def convert_physical_value(value: str, unit: str = None, strip_units=True):
    """Validate that the provided value is a valid physical quantity.

    Arguments:
        value: Value to validate (str)
        unit: Optional unit to convert to, and validate against
        strip_units: If True, strip units from the returned value, and return only the dimension

    Raises:
        ValidationError: If the value is invalid or cannot be converted to the specified unit

    Returns:
        The converted quantity, in the specified units
    """
    ureg = get_unit_registry()

    # Check that the provided unit is available in the unit registry
    if unit:
        try:
            valid = unit in ureg
        except Exception as exc:
            valid = False

        if not valid:
            raise ValidationError(_(f'Invalid unit provided ({unit})'))

    # Keep the original (unmodified) input for use in error messages
    original = str(value).strip()

    # Ensure that the value is a string
    value = str(value).strip() if value else ''
    unit = str(unit).strip() if unit else ''

    # Handle imperial length measurements (trailing ' = feet, trailing " = inches)
    if value.count("'") == 1 and value.endswith("'"):
        value = value.replace("'", ' feet')

    if value.count('"') == 1 and value.endswith('"'):
        value = value.replace('"', ' inches')

    # Error on blank values
    if not value:
        raise ValidationError(_('No value provided'))

    # Construct a list of values to "attempt" to convert
    attempts = [value]

    # Attempt to convert from engineering notation
    eng = from_engineering_notation(value)
    attempts.append(eng)

    # Append the unit, if provided
    # These are the "final" attempts to convert the value, and *must* appear after previous attempts
    if unit:
        attempts.append(f'{value}{unit}')
        attempts.append(f'{eng}{unit}')

    value = None

    # Run through the available "attempts", take the first successful result
    for attempt in attempts:
        try:
            value = convert_value(attempt, unit)
            break
        except Exception as exc:
            value = None
            pass

    if value is None:
        if unit:
            raise ValidationError(_(f'Could not convert {original} to {unit}'))
        else:
            raise ValidationError(_('Invalid quantity supplied'))

    # Calculate the "magnitude" of the value, as a float
    # If the value is specified strangely (e.g. as a fraction or a dozen), this can cause issues
    # So, we ensure that it is converted to a floating point value
    # If we wish to return a "raw" value, some trickery is required
    try:
        if unit:
            magnitude = ureg.Quantity(value.to(ureg.Unit(unit))).magnitude
        else:
            magnitude = ureg.Quantity(value.to_base_units()).magnitude

        magnitude = float(ureg.Quantity(magnitude).to_base_units().magnitude)
    except Exception as exc:
        raise ValidationError(_(f'Invalid quantity supplied ({exc})'))

    if strip_units:
        return magnitude
    elif unit or value.units:
        return ureg.Quantity(magnitude, unit or value.units)

    return ureg.Quantity(magnitude)
def is_dimensionless(value):
    """Determine if the provided value is 'dimensionless'.

    A dimensionless value might look like:
    0.1
    1/2 dozen
    three thousand
    1.2 dozen
    (etc)
    """
    ureg = get_unit_registry()

    # Normalize the provided value into a pint Quantity first
    quantity = ureg.Quantity(value)

    # Dimensionless either directly, or once reduced to base units
    return (
        quantity.units == ureg.dimensionless
        or quantity.to_base_units().units == ureg.dimensionless
    )

View File

@ -1,116 +0,0 @@
"""Custom exchange backend which hooks into the InvenTree plugin system to fetch exchange rates from an external API."""
import logging
from django.db.transaction import atomic
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
from djmoney.contrib.exchange.models import ExchangeBackend, Rate
from common.settings import currency_code_default, currency_codes
logger = logging.getLogger('inventree')
class InvenTreeExchange(SimpleExchangeBackend):
    """Backend for automatically updating currency exchange rates.

    Uses the plugin system to actually fetch the rates from an external API.
    """

    name = 'InvenTreeExchange'

    def get_rates(self, **kwargs) -> None:
        """Set the requested currency codes and get rates.

        Returns a dict of {currency_code: rate}, or an empty dict on failure.
        """
        from common.models import InvenTreeSetting
        from plugin import registry

        base_currency = kwargs.get('base_currency', currency_code_default())
        symbols = kwargs.get('symbols', currency_codes())

        # Find the selected exchange rate plugin
        slug = InvenTreeSetting.get_setting('CURRENCY_UPDATE_PLUGIN', '', create=False)

        if slug:
            plugin = registry.get_plugin(slug)
        else:
            plugin = None

        if not plugin:
            # Find the first active currency exchange plugin
            plugins = registry.with_mixin('currencyexchange', active=True)

            if len(plugins) > 0:
                plugin = plugins[0]

        if not plugin:
            logger.warning(
                'No active currency exchange plugins found - skipping update'
            )
            return {}

        logger.info("Running exchange rate update using plugin '%s'", plugin.name)

        # Plugin found - run the update task
        try:
            rates = plugin.update_exchange_rates(base_currency, symbols)
        except Exception as exc:
            logger.exception('Exchange rate update failed: %s', exc)
            return {}

        if not rates:
            logger.warning(
                'Exchange rate update failed - no data returned from plugin %s', slug
            )
            return {}

        # Update exchange rates based on returned data
        if type(rates) is not dict:
            logger.warning(
                'Invalid exchange rate data returned from plugin %s (type %s)',
                slug,
                type(rates),
            )
            return {}

        # Ensure base currency is provided
        rates[base_currency] = 1.00

        return rates

    @atomic
    def update_rates(self, base_currency=None, **kwargs):
        """Call to update all exchange rates."""
        # Resolve the default base currency *before* storing it against the
        # backend record - previously the backend was created first, which
        # could persist a None base_currency value
        if base_currency is None:
            base_currency = currency_code_default()

        backend, _ = ExchangeBackend.objects.update_or_create(
            name=self.name, defaults={'base_currency': base_currency}
        )

        symbols = currency_codes()

        logger.info(
            'Updating exchange rates for %s (%s currencies)',
            base_currency,
            len(symbols),
        )

        # Fetch new rates from the backend
        # If the backend fails, the existing rates will not be updated
        rates = self.get_rates(base_currency=base_currency, symbols=symbols)

        if rates:
            # Clear out existing rates
            backend.clear_rates()

            Rate.objects.bulk_create([
                Rate(currency=currency, value=amount, backend=backend)
                for currency, amount in rates.items()
            ])

            # Only log success when rates were actually updated
            logger.info('Updated exchange rates for %s', base_currency)
        else:
            logger.info(
                'No exchange rates returned from backend - currencies not updated'
            )

View File

@ -1,213 +0,0 @@
"""Custom fields used in InvenTree."""
import sys
from decimal import Decimal
from django import forms
from django.db import models
from django.utils.translation import gettext_lazy as _
from djmoney.forms.fields import MoneyField
from djmoney.models.fields import MoneyField as ModelMoneyField
from djmoney.models.validators import MinMoneyValidator
from rest_framework.fields import URLField as RestURLField
from rest_framework.fields import empty
import InvenTree.helpers
from .validators import AllowedURLValidator, allowable_url_schemes
class InvenTreeRestURLField(RestURLField):
    """Custom field for DRF with custom scheme validators."""

    def __init__(self, **kwargs):
        """Update schemes.

        Also applies a default max_length of 200 for form validation.
        """
        # Enforce 'max length' parameter in form validation
        if 'max_length' not in kwargs:
            kwargs['max_length'] = 200

        super().__init__(**kwargs)

        # NOTE(review): assumes the URL validator is the *last* validator
        # installed by RestURLField - confirm when upgrading DRF
        self.validators[-1].schemes = allowable_url_schemes()

    def run_validation(self, data=empty):
        """Override default validation behaviour for this field type.

        If the 'INVENTREE_STRICT_URLS' setting is disabled, a value lacking a
        scheme is validated as if 'http://' had been provided.
        """
        import common.models

        strict_urls = common.models.InvenTreeSetting.get_setting(
            'INVENTREE_STRICT_URLS', True, cache=False
        )

        if not strict_urls and data is not empty:
            if '://' not in data:
                # Validate as if there were a schema provided
                data = 'http://' + data

        return super().run_validation(data=data)
class InvenTreeURLField(models.URLField):
    """Custom URL field which has custom scheme validators."""

    # Restrict which URL schemes are accepted for this field
    default_validators = [AllowedURLValidator()]

    def __init__(self, **kwargs):
        """Initialization method for InvenTreeURLField."""
        # Max length for InvenTreeURLField is set to 200
        # NOTE(review): this unconditionally overrides any caller-supplied
        # max_length value - confirm this is intentional
        kwargs['max_length'] = 200
        super().__init__(**kwargs)
def money_kwargs(**kwargs):
    """Return the database settings for MoneyFields, applying project defaults.

    Any of the default keys already present in kwargs are left untouched.
    """
    from common.settings import currency_code_default, currency_code_mappings

    # Static default values (if not specified)
    static_defaults = {'max_digits': 19, 'decimal_places': 6}

    for key, val in static_defaults.items():
        if key not in kwargs:
            kwargs[key] = val

    # Dynamic defaults - only computed when actually required
    if 'currency_choices' not in kwargs:
        kwargs['currency_choices'] = currency_code_mappings()

    if 'default_currency' not in kwargs:
        kwargs['default_currency'] = currency_code_default()

    return kwargs
class InvenTreeModelMoneyField(ModelMoneyField):
    """Custom MoneyField for clean migrations while using dynamic currency settings."""

    def __init__(self, **kwargs):
        """Overwrite default values and validators."""
        # detect if creating migration
        if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
            # remove currency information for a clean migration
            kwargs['default_currency'] = ''
            kwargs['currency_choices'] = []

        kwargs = money_kwargs(**kwargs)

        # Set a minimum value validator
        validators = kwargs.get('validators', [])

        allow_negative = kwargs.pop('allow_negative', False)

        # If no validators are provided, add some "standard" ones
        if len(validators) == 0:
            if not allow_negative:
                # Default to non-negative monetary values
                validators.append(MinMoneyValidator(0))

        kwargs['validators'] = validators

        super().__init__(**kwargs)

    def formfield(self, **kwargs):
        """Override form class to use own function."""
        kwargs['form_class'] = InvenTreeMoneyField
        return super().formfield(**kwargs)

    def to_python(self, value):
        """Convert value to python type, rounded to the configured precision."""
        value = super().to_python(value)
        return round_decimal(value, self.decimal_places)

    def prepare_value(self, value):
        """Override the 'prepare_value' method, to remove trailing zeros when displaying.

        Why? It looks nice!
        """
        return round_decimal(value, self.decimal_places, normalize=True)
class InvenTreeMoneyField(MoneyField):
    """Custom MoneyField for clean migrations while using dynamic currency settings."""

    def __init__(self, *args, **kwargs):
        """Apply dynamic currency defaults before delegating to MoneyField."""
        super().__init__(*args, **money_kwargs(**kwargs))
class DatePickerFormField(forms.DateField):
    """Custom date-picker field, rendered with a native HTML date input."""

    def __init__(self, **kwargs):
        """Set up custom default values.

        The 'help_text', 'label', 'required' and 'initial' keyword arguments
        may be supplied by the caller; all other keyword arguments are
        ignored (matching the original behaviour).
        """
        help_text = kwargs.get('help_text', _('Enter date'))
        label = kwargs.get('label', None)
        required = kwargs.get('required', False)
        initial = kwargs.get('initial', None)

        widget = forms.DateInput(attrs={'type': 'date'})

        # Use super() rather than calling forms.DateField.__init__ directly,
        # so that cooperative multiple inheritance resolves correctly
        super().__init__(
            required=required,
            initial=initial,
            help_text=help_text,
            widget=widget,
            label=label,
        )
def round_decimal(value, places, normalize=False):
    """Round value to the specified number of places.

    Arguments:
        value: Value to round. Only Decimal and float values are rounded;
            anything else is returned unchanged.
        places: Number of decimal places to round to
        normalize: If True, strip trailing zeroes from the rounded result
    """
    if type(value) in (Decimal, float):
        value = round(value, places)

        if normalize:
            # Remove any trailing zeroes
            value = InvenTree.helpers.normalize(value)

    return value
class RoundingDecimalFormField(forms.DecimalField):
    """Custom DecimalField form field which automatically rounds inputs."""

    def to_python(self, value):
        """Convert the raw value to python, rounded to the configured precision."""
        converted = super().to_python(value)
        return round_decimal(converted, self.decimal_places)

    def prepare_value(self, value):
        """Override the 'prepare_value' method, to remove trailing zeros when displaying.

        Why? It looks nice!
        """
        return round_decimal(value, self.decimal_places, normalize=True)
class RoundingDecimalField(models.DecimalField):
    """Custom model DecimalField which automatically rounds inputs."""

    def to_python(self, value):
        """Convert the stored value to python, rounded to the configured precision."""
        converted = super().to_python(value)
        return round_decimal(converted, self.decimal_places)

    def formfield(self, **kwargs):
        """Return a form Field instance, using the rounding form field class."""
        kwargs['form_class'] = RoundingDecimalFormField
        return super().formfield(**kwargs)
class InvenTreeNotesField(models.TextField):
    """Custom implementation of a 'notes' field."""

    # Maximum character limit for the various 'notes' fields
    NOTES_MAX_LENGTH = 50000

    def __init__(self, **kwargs):
        """Apply the standard 'notes' field configuration.

        Note that max_length, verbose_name, blank and null are always set to
        the standard values, overriding any caller-supplied values.
        """
        kwargs.update(
            max_length=self.NOTES_MAX_LENGTH,
            verbose_name=_('Notes'),
            blank=True,
            null=True,
        )
        super().__init__(**kwargs)

View File

@ -1,169 +0,0 @@
"""General filters for InvenTree."""
from datetime import datetime
from django.conf import settings
from django.utils import timezone
from django.utils.timezone import make_aware
from django_filters import rest_framework as rest_filters
from rest_framework import filters
import InvenTree.helpers
class InvenTreeDateFilter(rest_filters.DateFilter):
    """Custom DateFilter class which handles timezones correctly."""

    def filter(self, qs, value):
        """Override the filter method to handle timezones correctly.

        When USE_TZ is enabled, the incoming date is converted to a
        timezone-aware datetime (at midnight) before filtering.
        """
        if settings.USE_TZ:
            if value is not None:
                tz = timezone.get_current_timezone()
                value = datetime(value.year, value.month, value.day)
                # NOTE(review): the third positional argument here is 'is_dst',
                # which is deprecated in newer Django versions - confirm
                # behaviour before upgrading Django
                value = make_aware(value, tz, True)

        return super().filter(qs, value)
class InvenTreeSearchFilter(filters.SearchFilter):
    """Custom search filter which allows adjusting of search terms dynamically."""

    def get_search_fields(self, view, request):
        """Return a set of search fields for the request, adjusted based on request params.

        The following query params are available to 'augment' the search (in decreasing order of priority)
        - search_regex: If True, search is performed on 'regex' comparison
        """
        regex = InvenTree.helpers.str2bool(
            request.query_params.get('search_regex', False)
        )

        search_fields = super().get_search_fields(view, request) or []

        # A '$' prefix enables DRF regex matching on the field
        return ['$' + field if regex else field for field in search_fields]

    def get_search_terms(self, request):
        """Return the search terms for this search request.

        Depending on the request parameters, we may "augment" these somewhat
        """
        whole = InvenTree.helpers.str2bool(
            request.query_params.get('search_whole', False)
        )

        terms = []

        for term in super().get_search_terms(request) or []:
            term = term.strip()

            if not term:
                # Ignore blank inputs
                continue

            if whole:
                # Wrap the search term to enable word-boundary matching
                term = r'\y' + term + r'\y'

            terms.append(term)

        return terms
class InvenTreeOrderingFilter(filters.OrderingFilter):
    """Custom OrderingFilter class which allows aliased filtering of related fields.

    To use, simply specify this filter in the "filter_backends" section.

    filter_backends = [
        InvenTreeOrderingFilter,
    ]

    Then, specify a ordering_field_aliases attribute:

    ordering_field_aliases = {
        'name': 'part__part__name',
        'SKU': 'part__SKU',
    }
    """

    def get_ordering(self, request, queryset, view):
        """Override ordering for supporting aliases."""
        ordering = super().get_ordering(request, queryset, view)

        aliases = getattr(view, 'ordering_field_aliases', None)

        # Attempt to map ordering fields based on provided aliases
        if ordering is not None and aliases is not None:
            """Ordering fields should be mapped to separate fields."""
            ordering_initial = ordering
            ordering = []

            for field in ordering_initial:
                # A leading '-' indicates reverse ordering
                reverse = field.startswith('-')

                if reverse:
                    field = field[1:]

                # Are aliases defined for this field?
                if field in aliases:
                    alias = aliases[field]
                else:
                    alias = field

                """
                Potentially, a single field could be "aliased" to multiple field,

                (For example to enforce a particular ordering sequence)

                e.g. to filter first by the integer value...

                ordering_field_aliases = {
                    "reference": ["integer_ref", "reference"]
                }
                """

                if type(alias) is str:
                    # Normalize a single alias into a list
                    alias = [alias]
                elif type(alias) in [list, tuple]:
                    pass
                else:
                    # Unsupported alias type
                    continue

                for a in alias:
                    # Re-apply reverse ordering to each mapped field
                    if reverse:
                        a = '-' + a

                    ordering.append(a)

        return ordering
# Standard filter backends: django-filter + search + basic ordering
SEARCH_ORDER_FILTER = [
    rest_filters.DjangoFilterBackend,
    InvenTreeSearchFilter,
    filters.OrderingFilter,
]

# As above, but with support for 'ordering_field_aliases' on the view
SEARCH_ORDER_FILTER_ALIAS = [
    rest_filters.DjangoFilterBackend,
    InvenTreeSearchFilter,
    InvenTreeOrderingFilter,
]

# Filtering and ordering only (no search support)
ORDER_FILTER = [rest_filters.DjangoFilterBackend, filters.OrderingFilter]

View File

@ -1,219 +0,0 @@
"""Custom string formatting functions and helpers."""
import re
import string
from django.conf import settings
from django.utils import translation
from django.utils.translation import gettext_lazy as _
from babel import Locale
from babel.numbers import parse_pattern
from djmoney.money import Money
def parse_format_string(fmt_string: str) -> dict:
    """Extract formatting information from the provided format string.

    Returns a dict object which contains structured information about the format groups
    """
    info = {}

    for prefix, field_name, _spec, _conv in string.Formatter().parse(fmt_string):
        # Ignore anonymous / unnamed format groups
        if not field_name:
            continue

        # Each named group may appear at most once in the pattern
        if field_name in info:
            raise ValueError(f"Duplicate group '{field_name}'")

        info[field_name] = {'format': field_name, 'prefix': prefix}

    return info
def construct_format_regex(fmt_string: str) -> str:
    r"""Construct a regular expression based on a provided format string.

    This function turns a python format string into a regular expression,
    which can be used for two purposes:

    - Ensure that a particular string matches the specified format
    - Extract named variables from a matching string

    This function also provides support for wildcard characters:

    - '?' provides single character matching; is converted to a '.' (period) for regex
    - '#' provides single digit matching; is converted to '\d'

    Args:
        fmt_string: A typical format string e.g. "PO-???-{ref:04d}"

    Returns:
        str: A regular expression pattern e.g. ^PO\-...\-(?P<ref>.*)$

    Raises:
        ValueError: Format string is invalid
    """
    pattern = '^'

    # Literal characters which must be escaped before embedding in the regex
    # (hoisted out of the loop - it is invariant)
    special_chars = [
        '+',
        '-',
        '.',
        '{',
        '}',
        '(',
        ')',
        '^',
        '$',
        '~',
        '!',
        '@',
        ':',
        ';',
        '|',
        "'",
        '"',
    ]

    for group in string.Formatter().parse(fmt_string):
        prefix = group[0]  # Prefix (literal text appearing before this group)
        name = group[1]  # Name of this format variable
        fmt_spec = group[2]  # Format specifier e.g :04d

        # Escape any special regex characters
        for ch in special_chars:
            prefix = prefix.replace(ch, '\\' + ch)

        # Replace ? with single-character match
        prefix = prefix.replace('?', '.')

        # Replace # with single-digit match
        prefix = prefix.replace('#', r'\d')

        pattern += prefix

        # Add a named capture group for the format entry
        if name:
            # Check if integer values are required
            # (use a raw string: bare '\d' is an invalid escape sequence,
            # and the previous local name shadowed the builtin 'chr')
            if fmt_spec.endswith('d'):
                char_class = r'\d'
            else:
                char_class = '.'

            # Specify width
            # TODO: Introspect required width
            width = '+'

            pattern += f'(?P<{name}>{char_class}{width})'

    pattern += '$'

    return pattern
def validate_string(value: str, fmt_string: str) -> str:
    """Validate that the provided string matches the specified format.

    Args:
        value: The string to be tested e.g. 'SO-1234-ABC',
        fmt_string: The required format e.g. 'SO-{ref}-???',

    Returns:
        bool: True if the value matches the required format, else False

    Raises:
        ValueError: The provided format string is invalid
    """
    # Convert the format pattern into a regex, then test the value against it
    return re.match(construct_format_regex(fmt_string), value) is not None
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
    """Extract a named value from the provided string, given the provided format string.

    Args:
        name: Name of group to extract e.g. 'ref'
        value: Raw string e.g. 'PO-ABC-1234'
        fmt_string: Format pattern e.g. 'PO-???-{ref}

    Returns:
        str: String value of the named group

    Raises:
        ValueError: format string is incorrectly specified, or provided value does not match format string
        NameError: named value does not exist in the format string
        IndexError: named value could not be found in the provided entry
    """
    info = parse_format_string(fmt_string)

    # Reject early if the requested group does not appear in the format pattern
    if name not in info.keys():
        raise NameError(_(f"Value '{name}' does not appear in pattern format"))

    # Construct a regular expression for matching against the provided format string
    # Note: This will raise a ValueError if 'fmt_string' is incorrectly specified
    pattern = construct_format_regex(fmt_string)

    # Run the regex matcher against the raw string
    result = re.match(pattern, value)

    if not result:
        raise ValueError(
            _('Provided value does not match required pattern: ') + fmt_string
        )

    # And return the value we are interested in
    # Note: This will raise an IndexError if the named group was not matched
    return result.group(name)
def format_money(
    money: Money,
    decimal_places: int = None,
    format: str = None,
    include_symbol: bool = True,
) -> str:
    """Format money object according to the currently set local.

    Args:
        money (Money): The money object to format
        decimal_places (int): Number of decimal places to use
        format (str): Format pattern according LDML / the babel format pattern syntax (https://babel.pocoo.org/en/latest/numbers.html)
        include_symbol (bool): If True (default), render the currency symbol

    Returns:
        str: The formatted string

    Raises:
        ValueError: format string is incorrectly specified
    """
    # NOTE(review): the original expression was
    # 'None and translation.get_language() or settings.LANGUAGE_CODE',
    # which always short-circuits to settings.LANGUAGE_CODE (dead code).
    # Behavior is preserved here; presumably translation.get_language()
    # was intended at some point - confirm before changing behavior.
    language = settings.LANGUAGE_CODE

    locale = Locale.parse(translation.to_locale(language))

    if format:
        pattern = parse_pattern(format)
    else:
        pattern = locale.currency_formats['standard']

    if decimal_places is not None:
        # Force a fixed number of fractional digits
        pattern.frac_prec = (decimal_places, decimal_places)

    result = pattern.apply(
        money.amount,
        locale,
        currency=money.currency.code if include_symbol else '',
        currency_digits=decimal_places is None,
        decimal_quantization=decimal_places is not None,
    )

    return result

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,121 +0,0 @@
"""Permission set for InvenTree."""
from functools import wraps
from rest_framework import permissions
import users.models
def get_model_for_view(view, raise_error=True):
    """Attempt to introspect the 'model' type for an API view.

    Checks, in order: an explicit 'get_permission_model' hook, the
    'serializer_class' attribute, and finally 'get_serializer_class'.

    Note: 'raise_error' is accepted for API compatibility but is not
    currently used; an AttributeError is always raised if no model is found.
    """
    if hasattr(view, 'get_permission_model'):
        return view.get_permission_model()

    if hasattr(view, 'serializer_class'):
        return view.serializer_class.Meta.model

    if hasattr(view, 'get_serializer_class'):
        # Bug fix: previously called the misspelled 'get_serializr_class',
        # which raised AttributeError for views relying on this branch
        return view.get_serializer_class().Meta.model

    raise AttributeError(f'Serializer class not specified for {view.__class__}')
class RolePermission(permissions.BasePermission):
    """Role mixin for API endpoints, allowing us to specify the user "role" which is required for certain operations.

    Each endpoint can have one or more of the following actions:
    - GET
    - POST
    - PUT
    - PATCH
    - DELETE

    Specify the required "role" using the role_required attribute.

    e.g.

    role_required = "part"

    The RoleMixin class will then determine if the user has the required permission
    to perform the specified action.

    For example, a DELETE action will be rejected unless the user has the "part.remove" permission
    """

    def has_permission(self, request, view):
        """Determine if the current user has the specified permissions.

        Resolution order:
        1. Superusers are always allowed.
        2. If the view declares 'role_required', check that role directly.
        3. Otherwise, derive the model table from the view's serializer and
           check table-level permission for the mapped action.
        """
        user = request.user

        # Superuser can do it all
        if user.is_superuser:
            return True

        # Map the request method to a permission type
        rolemap = {
            'GET': 'view',
            'OPTIONS': 'view',
            'POST': 'add',
            'PUT': 'change',
            'PATCH': 'change',
            'DELETE': 'delete',
        }

        # let the view define a custom rolemap
        if hasattr(view, 'rolemap'):
            rolemap.update(view.rolemap)

        permission = rolemap[request.method]

        # The required role may be defined for the view class
        if role := getattr(view, 'role_required', None):
            # If the role is specified as "role.permission", split it
            if '.' in role:
                role, permission = role.split('.')

            return users.models.check_user_role(user, role, permission)

        try:
            # Extract the model name associated with this request
            model = get_model_for_view(view)

            app_label = model._meta.app_label
            model_name = model._meta.model_name

            table = f'{app_label}_{model_name}'
        except AttributeError:
            # We will assume that if the serializer class does *not* have a Meta,
            # then we don't need a permission
            return True

        return users.models.RuleSet.check_table_permission(user, table, permission)
class IsSuperuser(permissions.IsAdminUser):
    """Allows access only to superuser users."""

    def has_permission(self, request, view):
        """Check if the user is a superuser."""
        user = request.user
        return bool(user and user.is_superuser)
class IsStaffOrReadOnly(permissions.IsAdminUser):
    """Allows read-only access to any user, but write access is restricted to staff users."""

    def has_permission(self, request, view):
        """Check if the user is a staff member, or the request is read-only (safe method).

        Note: the docstring previously (and incorrectly) described a superuser check.
        """
        # Parentheses added for clarity only: 'and' already binds tighter than 'or'
        return bool(
            (request.user and request.user.is_staff)
            or request.method in permissions.SAFE_METHODS
        )
def auth_exempt(view_func):
    """Mark a view function as being exempt from auth requirements."""

    def _exempt_view(*args, **kwargs):
        return view_func(*args, **kwargs)

    # Set the exemption flag *before* wraps() copies metadata across,
    # matching the original attribute / __dict__ merge order
    _exempt_view.auth_exempt = True
    return wraps(view_func)(_exempt_view)

View File

@ -1,887 +0,0 @@
"""Serializers used in various InvenTree apps."""
import os
from collections import OrderedDict
from copy import deepcopy
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError as DjangoValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _
import tablib
from djmoney.contrib.django_rest_framework.fields import MoneyField
from djmoney.money import Money
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import empty
from rest_framework.serializers import DecimalField
from rest_framework.utils import model_meta
from taggit.serializers import TaggitSerializer
import common.models as common_models
from common.settings import currency_code_default, currency_code_mappings
from InvenTree.fields import InvenTreeRestURLField, InvenTreeURLField
class EmptySerializer(serializers.Serializer):
    """Empty serializer for use in testing.

    Intentionally defines no fields.
    """
class InvenTreeMoneySerializer(MoneyField):
    """Custom serializer for 'MoneyField', which ensures that passed values are numerically valid.

    Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
    """

    def __init__(self, *args, **kwargs):
        """Override default values."""
        # Apply sensible defaults, but allow the caller to override them
        kwargs['max_digits'] = kwargs.get('max_digits', 19)
        self.decimal_places = kwargs['decimal_places'] = kwargs.get('decimal_places', 6)
        kwargs['required'] = kwargs.get('required', False)

        super().__init__(*args, **kwargs)

    def get_value(self, data):
        """Test that the returned amount is a valid Decimal."""
        # Deliberately skip the DecimalField.get_value override and call the
        # next implementation in the MRO, to fetch the raw submitted value
        amount = super(DecimalField, self).get_value(data)

        # Convert an empty string to None
        if len(str(amount).strip()) == 0:
            amount = None

        try:
            if amount is not None and amount is not empty:
                # Convert to a Decimal instance, and round to maximum allowed decimal places
                amount = Decimal(amount)
                amount = round(amount, self.decimal_places)
        except Exception:
            raise ValidationError({self.field_name: [_('Must be a valid number')]})

        # Look up the matching currency value from the submitted data
        # (falling back to the field's default currency)
        currency = data.get(
            get_currency_field_name(self.field_name), self.default_currency
        )

        if (
            currency
            and amount is not None
            and not isinstance(amount, MONEY_CLASSES)
            and amount is not empty
        ):
            # Combine amount and currency into a Money instance
            return Money(amount, currency)

        return amount
class InvenTreeCurrencySerializer(serializers.ChoiceField):
    """Custom serializers for selecting currency option."""

    def __init__(self, *args, **kwargs):
        """Initialize the currency serializer.

        Builds the choice list from the configured currency codes,
        prepending a blank option if the field allows blank/null values.
        """
        choices = currency_code_mappings()

        allow_blank = kwargs.get('allow_blank', False) or kwargs.get(
            'allow_null', False
        )

        if allow_blank:
            # Prepend an "empty" choice so the field can be left unset
            choices = [('', '---------')] + choices

        kwargs['choices'] = choices

        if 'default' not in kwargs and 'required' not in kwargs:
            kwargs['default'] = '' if allow_blank else currency_code_default

        if 'label' not in kwargs:
            kwargs['label'] = _('Currency')

        if 'help_text' not in kwargs:
            kwargs['help_text'] = _('Select currency from available options')

        super().__init__(*args, **kwargs)
class DependentField(serializers.Field):
    """A dependent field can be used to dynamically return child fields based on the value of other fields."""

    # The dynamically-resolved child field (assigned by get_child)
    child = None

    def __init__(self, *args, depends_on, field_serializer, **kwargs):
        """A dependent field can be used to dynamically return child fields based on the value of other fields.

        Example:
        This example adds two fields. If the client selects integer, an integer field will be shown, but if he
        selects char, an char field will be shown. For any other value, nothing will be shown.

        class TestSerializer(serializers.Serializer):
            select_type = serializers.ChoiceField(choices=[
                ("integer", "Integer"),
                ("char", "Char"),
            ])
            my_field = DependentField(depends_on=["select_type"], field_serializer="get_my_field")

            def get_my_field(self, fields):
                if fields["select_type"] == "integer":
                    return serializers.IntegerField()
                if fields["select_type"] == "char":
                    return serializers.CharField()
        """
        super().__init__(*args, **kwargs)

        self.depends_on = depends_on
        self.field_serializer = field_serializer

    def get_child(self, raise_exception=False):
        """This method tries to extract the child based on the provided data in the request by the client."""
        data = deepcopy(self.context['request'].data)

        def visit_parent(node):
            """Recursively extract the data for the parent field/serializer in reverse."""
            nonlocal data

            if node.parent:
                visit_parent(node.parent)

            # only do for composite fields and stop right before the current field
            if hasattr(node, 'child') and node is not self and isinstance(data, dict):
                data = data.get(node.field_name, None)

        visit_parent(self)

        # ensure that data is a dictionary and that a parent exists
        if not isinstance(data, dict) or self.parent is None:
            return

        # check if the request data contains the dependent fields, otherwise skip getting the child
        for f in self.depends_on:
            if data.get(f, None) is None:
                # Fall back to the sibling field's declared default (if any)
                if (
                    self.parent
                    and (v := getattr(self.parent.fields[f], 'default', None))
                    is not None
                ):
                    data[f] = v
                else:
                    return

        # partially validate the data for options requests that set raise_exception while calling .get_child(...)
        if raise_exception:
            validation_data = {k: v for k, v in data.items() if k in self.depends_on}
            serializer = self.parent.__class__(
                context=self.context, data=validation_data, partial=True
            )
            serializer.is_valid(raise_exception=raise_exception)

        # try to get the field serializer
        field_serializer = getattr(self.parent, self.field_serializer)
        child = field_serializer(data)

        if not child:
            return

        self.child = child
        self.child.bind(field_name='', parent=self)

    def to_internal_value(self, data):
        """This method tries to convert the data to an internal representation based on the defined to_internal_value method on the child."""
        self.get_child()
        if self.child:
            return self.child.to_internal_value(data)

        return None

    def to_representation(self, value):
        """This method tries to convert the data to representation based on the defined to_representation method on the child."""
        self.get_child()
        if self.child:
            return self.child.to_representation(value)

        return None
class InvenTreeModelSerializer(serializers.ModelSerializer):
    """Inherits the standard Django ModelSerializer class, but also ensures that the underlying model class data are checked on validation."""

    # Switch out URLField mapping
    serializer_field_mapping = {
        **serializers.ModelSerializer.serializer_field_mapping,
        models.URLField: InvenTreeRestURLField,
        InvenTreeURLField: InvenTreeRestURLField,
    }

    def __init__(self, instance=None, data=empty, **kwargs):
        """Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
        # If instance is None, we are creating a new instance
        if instance is None and data is not empty:
            if data is None:
                data = OrderedDict()
            else:
                # Copy the incoming data so the caller's dict is not mutated
                new_data = OrderedDict()
                new_data.update(data)

                data = new_data

            # Add missing fields which have default values
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():
                """
                Update the field IF (and ONLY IF):
                - The field has a specified default value
                - The field does not already have a value set
                """
                if field.has_default() and field_name not in data:
                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            # Skip fields whose default callable fails
                            continue

                    data[field_name] = value

        super().__init__(instance, data, **kwargs)

    def get_initial(self):
        """Construct initial data for the serializer.

        Use the 'default' values specified by the django model definition
        """
        initials = super().get_initial().copy()

        # Are we creating a new instance?
        if self.instance is None:
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():
                if field.has_default() and field_name not in initials:
                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            continue

                    initials[field_name] = value

        return initials

    def skip_create_fields(self):
        """Return a list of 'fields' which should be skipped for model creation.

        This is used to 'bypass' a shortcoming of the DRF framework,
        which does not allow us to have writeable serializer fields which do not exist on the model.

        Default implementation returns an empty list
        """
        return []

    def save(self, **kwargs):
        """Catch any django ValidationError thrown at the moment `save` is called, and re-throw as a DRF ValidationError."""
        try:
            super().save(**kwargs)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return self.instance

    def create(self, validated_data):
        """Custom create method which supports field adjustment."""
        initial_data = validated_data.copy()

        # Remove any fields which do not exist on the model
        for field in self.skip_create_fields():
            initial_data.pop(field, None)

        return super().create(initial_data)

    def update(self, instance, validated_data):
        """Catch any django ValidationError, and re-throw as a DRF ValidationError."""
        try:
            instance = super().update(instance, validated_data)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return instance

    def run_validation(self, data=empty):
        """Perform serializer validation.

        In addition to running validators on the serializer fields,
        this class ensures that the underlying model is also validated.
        """
        # Run any native validation checks first (may raise a ValidationError)
        data = super().run_validation(data)

        if not hasattr(self, 'instance') or self.instance is None:
            # No instance exists (we are creating a new one)
            initial_data = data.copy()

            for field in self.skip_create_fields():
                # Remove any fields we do not wish to provide to the model
                initial_data.pop(field, None)

            # Create a (RAM only) instance for extra testing
            instance = self.Meta.model(**initial_data)
        else:
            # Instance already exists (we are updating!)
            instance = self.instance

            # Update instance fields
            for attr, value in data.items():
                try:
                    setattr(instance, attr, value)
                except (ValidationError, DjangoValidationError) as exc:
                    raise ValidationError(detail=serializers.as_serializer_error(exc))

        # Run a 'full_clean' on the model.
        # Note that by default, DRF does *not* perform full model validation!
        try:
            instance.full_clean()
        except (ValidationError, DjangoValidationError) as exc:
            if hasattr(exc, 'message_dict'):
                data = exc.message_dict
            elif hasattr(exc, 'message'):
                data = {'non_field_errors': [str(exc.message)]}
            else:
                data = {'non_field_errors': [str(exc)]}

            # Change '__all__' key (django style) to 'non_field_errors' (DRF style)
            if '__all__' in data:
                data['non_field_errors'] = data['__all__']
                del data['__all__']

            raise ValidationError(data)

        return data
class InvenTreeTaggitSerializer(TaggitSerializer):
    """Updated from https://github.com/glemmaPaul/django-taggit-serializer."""

    def update(self, instance, validated_data):
        """Overridden update method to re-add the tagmanager."""
        # Extract tag data before the base update, then restore it afterwards
        to_be_tagged, validated_data = self._pop_tags(validated_data)

        tag_object = super().update(instance, validated_data)

        for key in to_be_tagged.keys():
            # re-add the tagmanager (re-fetched from the database)
            new_tagobject = tag_object.__class__.objects.get(id=tag_object.id)
            setattr(tag_object, key, getattr(new_tagobject, key))

        return self._save_tags(tag_object, to_be_tagged)
class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
    """Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer.

    Provides both tag handling and full-model validation via the two base classes.
    """

    pass
class UserSerializer(InvenTreeModelSerializer):
    """Serializer for a User."""

    class Meta:
        """Metaclass defines serializer fields."""

        model = User
        fields = ['pk', 'username', 'first_name', 'last_name', 'email']

        # The username may not be changed via the API
        read_only_fields = ['username']
class ExendedUserSerializer(UserSerializer):
    """Serializer for a User with a bit more info.

    NOTE(review): the class name is misspelled ('Exended' vs 'Extended') but is
    kept as-is, since external code may reference it by name.
    """

    from users.serializers import GroupSerializer

    groups = GroupSerializer(read_only=True, many=True)

    class Meta(UserSerializer.Meta):
        """Metaclass defines serializer fields."""

        fields = UserSerializer.Meta.fields + [
            'groups',
            'is_staff',
            'is_superuser',
            'is_active',
        ]

        read_only_fields = UserSerializer.Meta.read_only_fields + ['groups']

    def validate(self, attrs):
        """Expanded validation for changing user role."""
        # Check if is_staff or is_superuser is in attrs
        role_change = 'is_staff' in attrs or 'is_superuser' in attrs
        request_user = self.context['request'].user

        if role_change:
            if request_user.is_superuser:
                # Superusers can change any role
                pass
            elif request_user.is_staff and 'is_superuser' not in attrs:
                # Staff can change any role except is_superuser
                pass
            else:
                raise PermissionDenied(
                    _('You do not have permission to change this user role.')
                )

        return super().validate(attrs)
class UserCreateSerializer(ExendedUserSerializer):
    """Serializer for creating a new User."""

    def validate(self, attrs):
        """Expanded validation for auth."""
        # Check that the user trying to create a new user is a superuser
        if not self.context['request'].user.is_superuser:
            raise serializers.ValidationError(_('Only superusers can create new users'))

        # Generate a random password
        password = User.objects.make_random_password(length=14)
        attrs.update({'password': password})
        return super().validate(attrs)

    def create(self, validated_data):
        """Send an email to the user after creation."""
        from InvenTree.helpers_model import get_base_url

        base_url = get_base_url()

        instance = super().create(validated_data)

        # Make sure the user cannot login until they have set a password
        instance.set_unusable_password()

        message = (
            _('Your account has been created.')
            + '\n\n'
            + _('Please use the password reset function to login')
        )

        if base_url:
            message += f'\n\nURL: {base_url}'

        # Send the user an onboarding email (from current site)
        instance.email_user(subject=_('Welcome to InvenTree'), message=message)

        return instance
class InvenTreeAttachmentSerializerField(serializers.FileField):
    """Override the DRF native FileField serializer, to remove the leading server path.

    For example, the FileField might supply something like:

    http://127.0.0.1:8000/media/foo/bar.jpg

    Whereas we wish to return:

    /media/foo/bar.jpg

    If the server process is serving the data at 127.0.0.1,
    but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
    then an attachment which prefixes the "address" of the internal server
    will not be accessible from the outside world.
    """

    def to_representation(self, value):
        """Return the MEDIA_URL-relative path for the file, or None if unset."""
        return os.path.join(str(settings.MEDIA_URL), str(value)) if value else None
class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
    """Special case of an InvenTreeModelSerializer, which handles an "attachment" model.

    The only real addition here is that we support "renaming" of the attachment file.
    """

    @staticmethod
    def attachment_fields(extra_fields=None):
        """Default set of fields for an attachment serializer."""
        fields = [
            'pk',
            'attachment',
            'filename',
            'link',
            'comment',
            'upload_date',
            'user',
            'user_detail',
        ]

        if extra_fields:
            fields += extra_fields

        return fields

    # Read-only details of the user who uploaded the attachment
    user_detail = UserSerializer(source='user', read_only=True, many=False)

    attachment = InvenTreeAttachmentSerializerField(required=False, allow_null=False)

    # The 'filename' field must be present in the serializer
    # (writes through to the model's 'basename' attribute, enabling renames)
    filename = serializers.CharField(
        label=_('Filename'), required=False, source='basename', allow_blank=False
    )

    upload_date = serializers.DateField(read_only=True)
class InvenTreeImageSerializerField(serializers.ImageField):
    """Custom image serializer.

    On upload, validate that the file is a valid image file
    """

    def to_representation(self, value):
        """Return the MEDIA_URL-relative path for the image, or None if unset."""
        return os.path.join(str(settings.MEDIA_URL), str(value)) if value else None
class InvenTreeDecimalField(serializers.FloatField):
    """Custom serializer for decimal fields.

    Solves the following issues:
    - The normal DRF DecimalField renders values with trailing zeros
    - Using a FloatField can result in rounding issues: https://code.djangoproject.com/ticket/30290
    """

    def to_internal_value(self, data):
        """Convert the incoming value to a Decimal (via its string representation)."""
        try:
            result = Decimal(str(data))
        except Exception:
            raise serializers.ValidationError(_('Invalid value'))

        return result
class DataFileUploadSerializer(serializers.Serializer):
    """Generic serializer for uploading a data file, and extracting a dataset.

    - Validates uploaded file
    - Extracts column names
    - Extracts data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = ['data_file']

    data_file = serializers.FileField(
        label=_('Data File'),
        help_text=_('Select data file for upload'),
        required=True,
        allow_empty_file=False,
    )

    def validate_data_file(self, data_file):
        """Perform validation checks on the uploaded data file.

        Validates extension and size, then loads the file contents into a
        tablib.Dataset (stored on self.dataset) for later extraction.
        """
        self.filename = data_file.name

        name, ext = os.path.splitext(data_file.name)

        # Remove the leading . from the extension
        ext = ext[1:]

        accepted_file_types = ['xls', 'xlsx', 'csv', 'tsv', 'xml']

        if ext not in accepted_file_types:
            raise serializers.ValidationError(_('Unsupported file type'))

        # Impose a 50MB limit on uploaded BOM files
        max_upload_file_size = 50 * 1024 * 1024

        if data_file.size > max_upload_file_size:
            raise serializers.ValidationError(_('File is too large'))

        # Read file data into memory (bytes object)
        try:
            data = data_file.read()
        except Exception as e:
            raise serializers.ValidationError(str(e))

        # Text-based formats must be decoded to str before parsing
        if ext in ['csv', 'tsv', 'xml']:
            try:
                data = data.decode()
            except Exception as e:
                raise serializers.ValidationError(str(e))

        # Convert to a tablib dataset (we expect headers)
        try:
            self.dataset = tablib.Dataset().load(data, ext, headers=True)
        except Exception as e:
            raise serializers.ValidationError(str(e))

        if len(self.dataset.headers) == 0:
            raise serializers.ValidationError(_('No columns found in file'))

        if len(self.dataset) == 0:
            raise serializers.ValidationError(_('No data rows found in file'))

        return data_file

    def match_column(self, column_name, field_names, exact=False):
        """Attempt to match a column name (from the file) to a field (defined in the model).

        Order of matching is:
        - Direct match
        - Case insensitive match
        - Fuzzy match
        """
        if not column_name:
            return None

        column_name = str(column_name).strip()

        column_name_lower = column_name.lower()

        if column_name in field_names:
            return column_name

        for field_name in field_names:
            if field_name.lower() == column_name_lower:
                return field_name

        if exact:
            # Finished available 'exact' matches
            return None

        # TODO: Fuzzy pattern matching for column names

        # No matches found
        return None

    def extract_data(self):
        """Returns dataset extracted from the file."""
        # Provide a dict of available import fields for the model
        model_fields = {}

        # Keep track of columns we have already extracted
        matched_columns = set()

        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                pass

        # Extract a list of valid model field names
        model_field_names = list(model_fields.keys())

        # Provide a dict of available columns from the dataset
        file_columns = {}

        for header in self.dataset.headers:
            column = {}

            # Attempt to "match" file columns to model fields
            match = self.match_column(header, model_field_names, exact=True)

            # Each model field may only be matched to a single file column
            if match is not None and match not in matched_columns:
                matched_columns.add(match)
                column['value'] = match
            else:
                column['value'] = None

            file_columns[header] = column

        return {
            'file_fields': file_columns,
            'model_fields': model_fields,
            'rows': [row.values() for row in self.dataset.dict],
            'filename': self.filename,
        }

    def save(self):
        """Empty overwrite for save."""
        ...
class DataFileExtractSerializer(serializers.Serializer):
    """Generic serializer for extracting data from an imported dataset.

    - User provides an array of matched headers
    - User provides an array of raw data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = ['columns', 'rows']

    # Mapping of columns
    columns = serializers.ListField(child=serializers.CharField(allow_blank=True))

    rows = serializers.ListField(
        child=serializers.ListField(
            child=serializers.CharField(allow_blank=True, allow_null=True)
        )
    )

    def validate(self, data):
        """Clean data."""
        data = super().validate(data)

        self.columns = data.get('columns', [])
        self.rows = data.get('rows', [])

        if len(self.rows) == 0:
            raise serializers.ValidationError(_('No data rows provided'))

        if len(self.columns) == 0:
            raise serializers.ValidationError(_('No data columns supplied'))

        self.validate_extracted_columns()

        return data

    @property
    def data(self):
        """Returns current data."""
        # Bug fix: initialize 'model_fields' unconditionally - previously it was
        # only assigned inside the TARGET_MODEL branch, raising NameError when
        # no TARGET_MODEL is registered
        model_fields = {}

        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                model_fields = {}

        rows = []

        for row in self.rows:
            """Optionally pre-process each row, before sending back to the client."""
            processed_row = self.process_row(self.row_to_dict(row))

            if processed_row:
                rows.append({'original': row, 'data': processed_row})

        return {'fields': model_fields, 'columns': self.columns, 'rows': rows}

    def process_row(self, row):
        """Process a 'row' of data, which is a mapped column:value dict.

        Returns either a mapped column:value dict, or None.

        If the function returns None, the column is ignored!
        """
        # Default implementation simply returns the original row data
        return row

    def row_to_dict(self, row):
        """Convert a "row" to a named data dict."""
        row_dict = {'errors': {}}

        for idx, value in enumerate(row):
            if idx < len(self.columns):
                col = self.columns[idx]

                if col:
                    row_dict[col] = value

        return row_dict

    def validate_extracted_columns(self):
        """Perform custom validation of header mapping."""
        # Bug fix: initialize before the conditional (see 'data' property above)
        model_fields = {}

        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                model_fields = {}

        cols_seen = set()

        for name, field in model_fields.items():
            required = field.get('required', False)

            # Check for missing required columns
            if required:
                if name not in self.columns:
                    raise serializers.ValidationError(
                        _(f"Missing required column: '{name}'")
                    )

        for col in self.columns:
            if not col:
                continue

            # Check for duplicated columns
            if col in cols_seen:
                raise serializers.ValidationError(_(f"Duplicate column: '{col}'"))

            cols_seen.add(col)

    def save(self):
        """No "save" action for this serializer."""
        pass
class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
    """Mixin class which allows downloading an 'image' from a remote URL.

    Adds the optional, write-only `remote_image` field to the serializer
    """

    def skip_create_fields(self):
        """Ensure the 'remote_image' field is skipped when creating a new instance."""
        return ['remote_image']

    remote_image = serializers.URLField(
        required=False,
        allow_blank=False,
        write_only=True,
        label=_('Remote Image'),
        help_text=_('URL of remote image file'),
    )

    def validate_remote_image(self, url):
        """Perform custom validation for the remote image URL.

        - Attempt to download the image and store it against this object instance
        - Catches and re-throws any errors
        """
        from InvenTree.helpers_model import download_image_from_url

        if not url:
            return

        # Remote downloads must be explicitly enabled via global setting
        if not common_models.InvenTreeSetting.get_setting(
            'INVENTREE_DOWNLOAD_FROM_URL'
        ):
            raise ValidationError(
                _('Downloading images from remote URL is not enabled')
            )

        try:
            # Downloaded file is stored on the serializer for later use
            self.remote_image_file = download_image_from_url(url)
        except Exception as exc:
            self.remote_image_file = None
            raise ValidationError(str(exc))

        return url

File diff suppressed because it is too large Load Diff

View File

@ -1,269 +0,0 @@
"""API endpoints for social authentication with allauth."""
import logging
from importlib import import_module
from django.urls import NoReverseMatch, include, path, reverse
from allauth.account.models import EmailAddress
from allauth.socialaccount import providers
from allauth.socialaccount.providers.oauth2.views import OAuth2Adapter, OAuth2LoginView
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import serializers
from rest_framework.exceptions import NotFound
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
import InvenTree.sso
from common.models import InvenTreeSetting
from InvenTree.mixins import CreateAPI, ListAPI, ListCreateAPI
from InvenTree.serializers import EmptySerializer, InvenTreeModelSerializer
logger = logging.getLogger('inventree')
class GenericOAuth2ApiLoginView(OAuth2LoginView):
    """Api view to login a user with a social account."""

    def dispatch(self, request, *args, **kwargs):
        """Dispatch the regular login view directly.

        Bypasses the parent dispatch machinery and invokes the
        allauth OAuth2 login flow immediately.
        """
        return self.login(request, *args, **kwargs)
class GenericOAuth2ApiConnectView(GenericOAuth2ApiLoginView):
    """Api view to connect a social account to the current user."""

    def dispatch(self, request, *args, **kwargs):
        """Dispatch the connect request directly."""
        # Override the request to be in 'connect' mode: the GET QueryDict is
        # immutable, so copy it before injecting the allauth 'process' flag
        request.GET = request.GET.copy()
        request.GET['process'] = 'connect'

        # Resume the dispatch
        return super().dispatch(request, *args, **kwargs)
def handle_oauth2(adapter: OAuth2Adapter, provider=None):
    """Define urls for oauth2 endpoints.

    Args:
        adapter: The OAuth2Adapter subclass for the provider.
        provider: The allauth provider instance. If omitted, falls back to the
            module-level ``provider`` loop variable (legacy behaviour, kept
            for backward compatibility with existing call sites).

    Returns:
        list: URL patterns for the provider's 'login' and 'connect' endpoints.
    """
    if provider is None:
        # Previously this function silently depended on the module-level
        # loop variable 'provider'; make that dependency explicit here.
        provider = globals()['provider']

    return [
        path(
            'login/',
            GenericOAuth2ApiLoginView.adapter_view(adapter),
            name=f'{provider.id}_api_login',
        ),
        path(
            'connect/',
            GenericOAuth2ApiConnectView.adapter_view(adapter),
            name=f'{provider.id}_api_connect',
        ),
    ]
# Mapping of legacy provider names to their modern oauth2 equivalents
legacy = {
    'twitter': 'twitter_oauth2',
    'bitbucket': 'bitbucket_oauth2',
    'linkedin': 'linkedin_oauth2',
    'vimeo': 'vimeo_oauth2',
    'openid': 'openid_connect',
}  # legacy connectors

# Collect urls for all loaded providers
social_auth_urlpatterns = []

provider_urlpatterns = []

for name, provider in providers.registry.provider_map.items():
    try:
        prov_mod = import_module(provider.get_package() + '.views')
    except ImportError:
        logger.exception('Could not import authentication provider %s', name)
        continue

    # Try to extract the adapter class
    adapters = [
        cls
        for cls in prov_mod.__dict__.values()
        if isinstance(cls, type)
        and not cls == OAuth2Adapter
        and issubclass(cls, OAuth2Adapter)
    ]

    # Get urls
    urls = []
    if len(adapters) == 1:
        urls = handle_oauth2(adapter=adapters[0])
    else:
        if provider.id in legacy:
            logger.warning(
                '`%s` is not supported on platform UI. Use `%s` instead.',
                provider.id,
                legacy[provider.id],
            )
            continue
        else:
            # Fixed typo in log message: "an feature" -> "a feature"
            logger.error(
                'Found handler that is not yet ready for platform UI: `%s`. Open a feature request on GitHub if you need it implemented.',
                provider.id,
            )
            continue
    provider_urlpatterns += [path(f'{provider.id}/', include(urls))]

social_auth_urlpatterns += provider_urlpatterns
class SocialProviderListResponseSerializer(serializers.Serializer):
    """Serializer for the SocialProviderListView.

    Describes the response shape for schema generation (used via
    @extend_schema on SocialProviderListView.get); it is not used to
    serialize actual model instances.
    """

    class SocialProvider(serializers.Serializer):
        """Serializer for the SocialProviderListResponseSerializer."""

        id = serializers.CharField()
        name = serializers.CharField()
        configured = serializers.BooleanField()
        login = serializers.URLField()
        connect = serializers.URLField()
        display_name = serializers.CharField()

    sso_enabled = serializers.BooleanField()
    sso_registration = serializers.BooleanField()
    mfa_required = serializers.BooleanField()
    providers = SocialProvider(many=True)
    registration_enabled = serializers.BooleanField()
    password_forgotten_enabled = serializers.BooleanField()
class SocialProviderListView(ListAPI):
    """List of available social providers."""

    permission_classes = (AllowAny,)
    serializer_class = EmptySerializer

    @extend_schema(
        responses={200: OpenApiResponse(response=SocialProviderListResponseSerializer)}
    )
    def get(self, request, *args, **kwargs):
        """Get the list of providers."""

        def absolute_url_or_none(view_name):
            """Build an absolute URL for the named view, or None if unregistered."""
            try:
                return request.build_absolute_uri(reverse(view_name))
            except NoReverseMatch:
                return None

        provider_list = []

        for provider in providers.registry.provider_map.values():
            provider_list.append({
                'id': provider.id,
                'name': provider.name,
                'configured': InvenTree.sso.check_provider(provider),
                'login': absolute_url_or_none(f'{provider.id}_api_login'),
                'connect': absolute_url_or_none(f'{provider.id}_api_connect'),
                'display_name': InvenTree.sso.provider_display_name(provider),
            })

        payload = {
            'sso_enabled': InvenTree.sso.login_enabled(),
            'sso_registration': InvenTree.sso.registration_enabled(),
            'mfa_required': InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA'),
            'providers': provider_list,
            'registration_enabled': InvenTreeSetting.get_setting('LOGIN_ENABLE_REG'),
            'password_forgotten_enabled': InvenTreeSetting.get_setting(
                'LOGIN_ENABLE_PWD_FORGOT'
            ),
        }

        return Response(payload)
class EmailAddressSerializer(InvenTreeModelSerializer):
    """Serializer for the EmailAddress model."""

    class Meta:
        """Meta options for EmailAddressSerializer."""

        model = EmailAddress
        # Expose all fields of the allauth EmailAddress model
        fields = '__all__'
class EmptyEmailAddressSerializer(InvenTreeModelSerializer):
    """Empty Serializer for the EmailAddress model.

    Used by action endpoints which accept no request body but
    operate on an EmailAddress selected via the URL.
    """

    class Meta:
        """Meta options for EmailAddressSerializer."""

        model = EmailAddress
        # No fields: the request body is intentionally empty
        fields = []
class EmailListView(ListCreateAPI):
    """List of registered email addresses for current users."""

    permission_classes = (IsAuthenticated,)
    serializer_class = EmailAddressSerializer

    def get_queryset(self):
        """Only return data for current user."""
        return EmailAddress.objects.filter(user=self.request.user)
class EmailActionMixin(CreateAPI):
    """Mixin to modify email addresses for current users.

    Subclasses implement `special_action(email, ...)` to perform the
    specific operation on the selected EmailAddress.
    """

    serializer_class = EmptyEmailAddressSerializer

    permission_classes = (IsAuthenticated,)

    def get_queryset(self):
        """Filter queryset for current user."""
        # NOTE(review): despite the name, this returns a single EmailAddress
        # instance (or None) via .first(), not a queryset
        return EmailAddress.objects.filter(
            user=self.request.user, pk=self.kwargs['pk']
        ).first()

    @extend_schema(responses={200: OpenApiResponse(response=EmailAddressSerializer)})
    def post(self, request, *args, **kwargs):
        """Filter item, run action and return data."""
        email = self.get_queryset()
        if not email:
            raise NotFound

        # Delegate the actual operation to the subclass
        self.special_action(email, request, *args, **kwargs)
        return Response(EmailAddressSerializer(email).data)
class EmailVerifyView(EmailActionMixin):
    """Re-verify an email for a currently logged in user."""

    def special_action(self, email, request, *args, **kwargs):
        """Send confirmation."""
        # Already-verified addresses need no confirmation email
        if email.verified:
            return

        email.send_confirmation(request)
class EmailPrimaryView(EmailActionMixin):
    """Make an email for a currently logged in user primary."""

    def special_action(self, email, *args, **kwargs):
        """Mark email as primary."""
        # No-op if the address is already primary
        if email.primary:
            return

        email.set_as_primary()
class EmailRemoveView(EmailActionMixin):
    """Remove an email for a currently logged in user."""

    def special_action(self, email, *args, **kwargs):
        """Delete email."""
        email.delete()

View File

@ -1,77 +0,0 @@
"""Helper functions for Single Sign On functionality."""
import logging
from common.models import InvenTreeSetting
from InvenTree.helpers import str2bool
logger = logging.getLogger('inventree')
def get_provider_app(provider):
    """Return the SocialApp object for the given provider.

    Args:
        provider: The allauth provider instance to look up.

    Returns:
        SocialApp: The first matching SocialApp, or None if none is registered.
    """
    from allauth.socialaccount.models import SocialApp

    # NOTE: QuerySet.filter() never raises DoesNotExist, so the previous
    # try/except around it was dead code. Evaluate the queryset once,
    # instead of issuing multiple COUNT queries.
    apps = list(SocialApp.objects.filter(provider__iexact=provider.id))

    if len(apps) > 1:
        logger.warning("Multiple SocialApps found for provider '%s'", provider.id)

    if not apps:
        logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
        return None

    return apps[0]
def check_provider(provider, raise_error=False):
    """Check if the given provider is correctly configured.

    To be correctly configured, the following must be true:

    - Provider must either have a registered SocialApp
    - Must have at least one site enabled

    Args:
        provider: The allauth provider instance to check.
        raise_error: Accepted for API compatibility.
            NOTE(review): this parameter is currently unused - nothing in
            this function ever raises. Confirm intended behaviour.

    Returns:
        bool: True if the provider is considered correctly configured.
    """
    import allauth.app_settings

    # First, check that the provider is enabled
    app = get_provider_app(provider)

    if not app:
        return False

    if allauth.app_settings.SITES_ENABLED:
        # At least one matching site must be specified
        if not app.sites.exists():
            logger.error('SocialApp %s has no sites configured', app)
            return False

    # At this point, we assume that the provider is correctly configured
    return True
def provider_display_name(provider):
    """Return the 'display name' for the given provider.

    Uses the name of the registered SocialApp when one exists,
    otherwise falls back to the provider's own name.
    """
    app = get_provider_app(provider)

    return app.name if app else provider.name
def login_enabled() -> bool:
    """Return True if SSO login is enabled."""
    # Setting value may be stored as a string; normalize to bool
    return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO'))
def registration_enabled() -> bool:
    """Return True if SSO registration is enabled."""
    return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'))
def auto_registration_enabled() -> bool:
    """Return True if SSO auto-registration is enabled."""
    return str2bool(InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO'))

View File

@ -1,163 +0,0 @@
"""This module provides custom translation tags specifically for use with javascript code.
Translated strings are escaped, such that they can be used as string literals in a javascript file.
"""
import django.templatetags.i18n
from django import template
from django.template import TemplateSyntaxError
from django.templatetags.i18n import TranslateNode
import bleach
import InvenTree.translation
register = template.Library()
@register.simple_tag()
def translation_stats(lang_code):
    """Return the translation percentage for the given language code.

    Returns None when no language code is supplied.
    """
    return (
        None
        if lang_code is None
        else InvenTree.translation.get_translation_percent(lang_code)
    )
class CustomTranslateNode(TranslateNode):
    """Custom translation node class, which sanitizes the translated strings for javascript use."""

    def __init__(self, filter_expression, noop, asvar, message_context, escape=False):
        """Custom constructor for TranslateNode class.

        - Adds an 'escape' argument, which is passed to the render function
        """
        super().__init__(filter_expression, noop, asvar, message_context)
        self.escape = escape

    def render(self, context):
        """Custom render function overrides / extends default behaviour.

        Sanitizes the translated string, strips escape sequences and
        disallowed characters, and escapes quotes when rendering into
        a javascript file.
        """
        result = super().render(context)
        result = bleach.clean(result)

        # Remove any escape sequences
        for seq in ['\a', '\b', '\f', '\n', '\r', '\t', '\v']:
            result = result.replace(seq, '')

        # Remove other disallowed characters
        for c in ['\\', '`', ';', '|', '&']:
            result = result.replace(c, '')

        # Escape any quotes contained in the string, if the request is for a javascript file
        # (fixed: 'request' was previously assigned twice on consecutive lines)
        request = context.get('request', None)
        template = getattr(context, 'template_name', None)

        escape = self.escape

        if template and str(template).endswith('.js'):
            escape = True

        if request and str(request.path).endswith('.js'):
            escape = True

        if escape:
            result = result.replace("'", r'\'')
            result = result.replace('"', r'\"')

        # Return the 'clean' resulting string
        return result
@register.tag('translate')
@register.tag('trans')
def do_translate(parser, token):
    """Custom translation function.

    - Lifted from https://github.com/django/django/blob/main/django/templatetags/i18n.py.
    - The only difference is that we pass this to our custom rendering node class,
      and support an additional 'escape' option.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]

    escape = False
    noop = False
    asvar = None
    message_context = None
    seen = set()
    invalid_context = {'as', 'noop'}

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                raise TemplateSyntaxError(
                    "No argument provided to the '%s' tag for the context option."
                    % bits[0]
                )
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context "
                    'option' % (value, bits[0])
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                raise TemplateSyntaxError(
                    "No argument provided to the '%s' tag for the as option." % bits[0]
                )
            asvar = value
        elif option == 'escape':
            escape = True
        else:
            # Fixed: error message previously omitted the custom 'escape' option
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", 'as VAR', and 'escape'."
                % (bits[0], option)
            )
        seen.add(option)

    return CustomTranslateNode(
        message_string, noop, asvar, message_context, escape=escape
    )
# Re-register tags which we have not explicitly overridden,
# so this library remains a drop-in replacement for django's i18n tags.
register.tag('blocktrans', django.templatetags.i18n.do_block_translate)
register.tag('blocktranslate', django.templatetags.i18n.do_block_translate)

register.tag('language', django.templatetags.i18n.language)
register.tag(
    'get_available_languages', django.templatetags.i18n.do_get_available_languages
)
register.tag('get_language_info', django.templatetags.i18n.do_get_language_info)
register.tag(
    'get_language_info_list', django.templatetags.i18n.do_get_language_info_list
)
register.tag('get_current_language', django.templatetags.i18n.do_get_current_language)
register.tag(
    'get_current_language_bidi', django.templatetags.i18n.do_get_current_language_bidi
)

# Re-register the stock i18n filters unchanged
register.filter('language_name', django.templatetags.i18n.language_name)
register.filter(
    'language_name_translated', django.templatetags.i18n.language_name_translated
)
register.filter('language_name_local', django.templatetags.i18n.language_name_local)
register.filter('language_bidi', django.templatetags.i18n.language_bidi)

File diff suppressed because it is too large Load Diff

View File

@ -1,285 +0,0 @@
"""Version information for InvenTree.
Provides information on the current InvenTree version
"""
import os
import pathlib
import platform
import re
import sys
from datetime import datetime as dt
from datetime import timedelta as td
import django
from django.conf import settings
from dulwich.repo import NotGitRepository, Repo
from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION
# InvenTree software version
INVENTREE_SW_VERSION = '0.15.0 dev'

# Discover git
try:
    # Walk three levels up from this file - presumably the repository root; TODO confirm
    main_repo = Repo(pathlib.Path(__file__).parent.parent.parent)
    main_commit = main_repo[main_repo.head()]
except (NotGitRepository, FileNotFoundError):
    # Not running from a git checkout (e.g. package or docker install)
    main_commit = None
def checkMinPythonVersion():
    """Check that the Python version is at least 3.9.

    Raises:
        RuntimeError: If the active interpreter is older than Python 3.9.
    """
    version = sys.version.split(' ')[0]
    docs = 'https://docs.inventree.org/en/stable/start/intro/#python-requirements'

    msg = f"""
    InvenTree requires Python 3.9 or above - you are running version {version}.
    - Refer to the InvenTree documentation for more information:
    - {docs}
    """

    # Tuple comparison covers both the 'major < 3' and 'minor < 9' checks
    if sys.version_info < (3, 9):
        raise RuntimeError(msg)

    print(f'Python version {version} - {sys.executable}')
def inventreeInstanceName():
    """Returns the InstanceName settings for the current database."""
    # Local import to avoid circular dependency at module load time
    import common.models

    return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
def inventreeInstanceTitle():
    """Returns the InstanceTitle for the current database."""
    import common.models

    # If the 'use instance name in title' setting is enabled, return the
    # configured instance name; otherwise fall back to the default title
    if common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE_TITLE', False):
        return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
    return 'InvenTree'
def inventreeVersion():
    """Returns the InvenTree version string, normalized to lowercase."""
    return INVENTREE_SW_VERSION.strip().lower()
def inventreeVersionTuple(version=None):
    """Return the InvenTree version string as (maj, min, sub) tuple.

    Note: despite the name, a 3-element list [major, minor, sub] is
    returned; Python compares lists element-wise just like tuples.
    """
    if version is None:
        version = INVENTREE_SW_VERSION

    groups = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', str(version)).groups()

    return list(map(int, groups))
def isInvenTreeDevelopmentVersion():
    """Return True if current InvenTree version is a "development" version."""
    # Development versions carry a 'dev' suffix, e.g. '0.15.0 dev'
    return inventreeVersion().endswith('dev')
def inventreeDocsVersion():
    """Return the version string matching the latest documentation.

    Development -> "latest"
    Release -> "major.minor.sub" e.g. "0.5.2"
    """
    if isInvenTreeDevelopmentVersion():
        return 'latest'
    return INVENTREE_SW_VERSION  # pragma: no cover
def inventreeDocUrl():
    """Return URL for InvenTree documentation site."""
    tag = inventreeDocsVersion()
    return f'https://docs.inventree.org/en/{tag}'
def inventreeAppUrl():
    """Return URL for InvenTree app site."""
    return f'{inventreeDocUrl()}/app/app/'
def inventreeCreditsUrl():
    """Return URL for InvenTree credits site."""
    return 'https://docs.inventree.org/en/latest/credits/'
def inventreeGithubUrl():
    """Return URL for InvenTree github site."""
    return 'https://github.com/InvenTree/InvenTree/'
def isInvenTreeUpToDate():
    """Test if the InvenTree instance is "up to date" with the latest version.

    A background task periodically queries GitHub for latest version, and stores it to the database as "_INVENTREE_LATEST_VERSION"
    """
    import common.models

    latest = common.models.InvenTreeSetting.get_setting(
        '_INVENTREE_LATEST_VERSION', backup_value=None, create=False
    )

    # No record for "latest" version - we must assume we are up to date!
    if not latest:
        return True

    # Extract "tuple" version (Python can directly compare version tuples)
    latest_version = inventreeVersionTuple(latest)  # pragma: no cover
    inventree_version = inventreeVersionTuple()  # pragma: no cover

    return inventree_version >= latest_version  # pragma: no cover
def inventreeApiVersion():
    """Returns current API version of InvenTree."""
    return INVENTREE_API_VERSION
def parse_version_text():
    """Parse the version text to structured data.

    Returns a dict mapping each version tag (e.g. 'v123') to a dict with
    keys: version, date, gh, text, latest.
    """
    sections = INVENTREE_API_TEXT.split('\n\n')

    # Remove first newline on latest version
    sections[0] = sections[0].replace('\n', '', 1)

    parsed = {}

    for section in sections:
        lines = section.split('\n')

        # Header line looks like: "<version> -> <date>:<github link>"
        header = lines[0].split(' -> ')
        detail = header[1].split(':', 1) if len(header) > 1 else ['']

        entry = {
            'version': header[0].strip(),
            'date': detail[0].strip(),
            'gh': detail[1].strip() if len(detail) > 1 else None,
            'text': lines[1:],
            'latest': False,
        }
        parsed[entry['version']] = entry

    return parsed
# Parse once at import time; the raw text never changes at runtime
INVENTREE_API_TEXT_DATA = parse_version_text()
"""Pre-processed API version text."""
def inventreeApiText(versions: int = 10, start_version: int = 0):
    """Returns API version descriptors.

    Args:
        versions: Number of versions to return. Default: 10
        start_version: first version to report. Defaults to return the latest {versions} versions.
    """
    # Default: report the most recent {versions} versions
    if start_version == 0:
        start_version = INVENTREE_API_VERSION - versions

    keys = (f'v{num}' for num in range(start_version, start_version + versions))

    return {key: INVENTREE_API_TEXT_DATA.get(key, None) for key in keys}
def inventreeDjangoVersion():
    """Returns the version of Django library."""
    return django.get_version()
def inventreePythonVersion():
    """Returns the version of python."""
    # sys.version looks like "3.11.4 (main, ...)" - the first token is the version
    return sys.version.partition(' ')[0]
def inventreeCommitHash():
    """Returns the git commit hash for the running codebase."""
    # First look in the environment variables, i.e. if running in docker
    if commit_hash := os.environ.get('INVENTREE_COMMIT_HASH', ''):
        return commit_hash

    if main_commit is None:
        return None

    # Short (7-character) commit hash
    return main_commit.sha().hexdigest()[0:7]
def inventreeCommitDate():
    """Returns the git commit date for the running codebase."""
    # First look in the environment variables, e.g. if running in docker
    commit_date = os.environ.get('INVENTREE_COMMIT_DATE', '')
    if commit_date:
        # Strip the time component, keep only the date
        return commit_date.split(' ')[0]

    if main_commit is None:
        return None

    # Adjust the (UTC) commit timestamp by the commit's own timezone offset
    commit_dt = dt.fromtimestamp(main_commit.commit_time) + td(
        seconds=main_commit.commit_timezone
    )
    return str(commit_dt.date())
def inventreeInstaller():
    """Returns the installer for the running codebase - if set.

    Resolution order: environment variable, docker detection, git checkout.
    Returns None if none of these apply.
    """
    # First look in the environment variables, e.g. if running in docker
    env_installer = os.environ.get('INVENTREE_PKG_INSTALLER', '')

    if env_installer:
        return env_installer
    if settings.DOCKER:
        return 'DOC'
    if main_commit is not None:
        return 'GIT'

    return None
def inventreeBranch():
    """Returns the branch for the running codebase - if set."""
    # First look in the environment variables, e.g. if running in docker
    branch = os.environ.get('INVENTREE_PKG_BRANCH', '')
    if branch:
        return branch

    if main_commit is None:
        return None

    try:
        # Resolve the symbolic HEAD ref to its branch name
        branch = main_repo.refs.follow(b'HEAD')[0][1].decode()
        return branch.removeprefix('refs/heads/')
    except IndexError:
        # Detached HEAD - no branch name available
        return None  # pragma: no cover
def inventreeTarget():
    """Returns the target platform for the running codebase - if set."""
    # First look in the environment variables, e.g. if running in docker
    return os.environ.get('INVENTREE_PKG_TARGET', None)
def inventreePlatform():
    """Returns the platform for the instance."""
    return platform.platform(aliased=True)
def inventreeDatabase():
    """Return the InvenTree database backend e.g. 'postgresql'.

    Returns None if no database engine is configured.
    """
    db = settings.DATABASES['default']

    # Guard against a missing ENGINE key: previously .replace() was called
    # on a potentially-None value, raising AttributeError
    engine = db.get('ENGINE') or ''

    return engine.replace('django.db.backends.', '') or None

View File

@ -1,656 +0,0 @@
"""JSON API for the Build app."""
from django.db.models import F, Q
from django.urls import include, path
from django.utils.translation import gettext_lazy as _
from django.contrib.auth.models import User
from rest_framework.exceptions import ValidationError
from django_filters.rest_framework import DjangoFilterBackend
from django_filters import rest_framework as rest_filters
from InvenTree.api import AttachmentMixin, APIDownloadMixin, ListCreateDestroyAPIView, MetadataView
from generic.states.api import StatusView
from InvenTree.helpers import str2bool, isNull, DownloadFile
from InvenTree.status_codes import BuildStatus, BuildStatusGroups
from InvenTree.mixins import CreateAPI, RetrieveUpdateDestroyAPI, ListCreateAPI
import common.models
import build.admin
import build.serializers
from build.models import Build, BuildLine, BuildItem, BuildOrderAttachment
import part.models
from users.models import Owner
from InvenTree.filters import SEARCH_ORDER_FILTER_ALIAS
class BuildFilter(rest_filters.FilterSet):
    """Custom filterset for BuildList API endpoint."""

    class Meta:
        """Metaclass options"""

        model = Build
        # Direct-match filters generated automatically by django-filter
        fields = [
            'parent',
            'sales_order',
            'part',
            'issued_by',
        ]

    status = rest_filters.NumberFilter(label='Status')

    active = rest_filters.BooleanFilter(label='Build is active', method='filter_active')

    def filter_active(self, queryset, name, value):
        """Filter the queryset to either include or exclude orders which are active."""
        if str2bool(value):
            return queryset.filter(status__in=BuildStatusGroups.ACTIVE_CODES)
        return queryset.exclude(status__in=BuildStatusGroups.ACTIVE_CODES)

    overdue = rest_filters.BooleanFilter(label='Build is overdue', method='filter_overdue')

    def filter_overdue(self, queryset, name, value):
        """Filter the queryset to either include or exclude orders which are overdue."""
        if str2bool(value):
            return queryset.filter(Build.OVERDUE_FILTER)
        return queryset.exclude(Build.OVERDUE_FILTER)

    assigned_to_me = rest_filters.BooleanFilter(label='assigned_to_me', method='filter_assigned_to_me')

    def filter_assigned_to_me(self, queryset, name, value):
        """Filter by orders which are assigned to the current user."""
        value = str2bool(value)

        # Work out who "me" is!
        owners = Owner.get_owners_matching_user(self.request.user)

        if value:
            return queryset.filter(responsible__in=owners)
        return queryset.exclude(responsible__in=owners)

    assigned_to = rest_filters.NumberFilter(label='responsible', method='filter_responsible')

    def filter_responsible(self, queryset, name, value):
        """Filter by orders which are assigned to the specified owner."""
        owners = list(Owner.objects.filter(pk=value))

        # if we query by a user, also find all ownerships through group memberships
        if len(owners) > 0 and owners[0].label() == 'user':
            owners = Owner.get_owners_matching_user(User.objects.get(pk=owners[0].owner_id))

        return queryset.filter(responsible__in=owners)

    # Exact match for reference
    reference = rest_filters.CharFilter(
        label='Filter by exact reference',
        field_name='reference',
        lookup_expr="iexact"
    )

    project_code = rest_filters.ModelChoiceFilter(
        queryset=common.models.ProjectCode.objects.all(),
        field_name='project_code'
    )

    has_project_code = rest_filters.BooleanFilter(label='has_project_code', method='filter_has_project_code')

    def filter_has_project_code(self, queryset, name, value):
        """Filter by whether or not the order has a project code"""
        if str2bool(value):
            return queryset.exclude(project_code=None)
        return queryset.filter(project_code=None)
class BuildList(APIDownloadMixin, ListCreateAPI):
    """API endpoint for accessing a list of Build objects.

    - GET: Return list of objects (with filters)
    - POST: Create a new Build object
    """

    queryset = Build.objects.all()
    serializer_class = build.serializers.BuildSerializer
    filterset_class = BuildFilter

    filter_backends = SEARCH_ORDER_FILTER_ALIAS

    ordering_fields = [
        'reference',
        'part__name',
        'status',
        'creation_date',
        'target_date',
        'completion_date',
        'quantity',
        'completed',
        'issued_by',
        'responsible',
        'project_code',
        'priority',
    ]

    ordering_field_aliases = {
        'reference': ['reference_int', 'reference'],
        'project_code': ['project_code__code'],
    }

    ordering = '-reference'

    search_fields = [
        'reference',
        'title',
        'part__name',
        'part__IPN',
        'part__description',
        'project_code__code',
        'priority',
    ]

    def get_queryset(self):
        """Override the queryset filtering, as some of the fields don't natively play nicely with DRF."""
        queryset = super().get_queryset().select_related('part')
        queryset = build.serializers.BuildSerializer.annotate_queryset(queryset)
        return queryset

    def download_queryset(self, queryset, export_format):
        """Download the queryset data as a file."""
        dataset = build.admin.BuildResource().export(queryset=queryset)

        filedata = dataset.export(export_format)
        filename = f"InvenTree_BuildOrders.{export_format}"

        return DownloadFile(filedata, filename)

    def filter_queryset(self, queryset):
        """Custom query filtering for the BuildList endpoint."""
        queryset = super().filter_queryset(queryset)

        params = self.request.query_params

        # exclude parent tree
        exclude_tree = params.get('exclude_tree', None)

        if exclude_tree is not None:
            try:
                # NOTE: local renamed from 'build' to avoid shadowing the
                # module-level 'build' import used elsewhere in this class
                exclude_build = Build.objects.get(pk=exclude_tree)

                queryset = queryset.exclude(
                    pk__in=[bld.pk for bld in exclude_build.get_descendants(include_self=True)]
                )
            except (ValueError, Build.DoesNotExist):
                pass

        # Filter by "ancestor" builds
        ancestor = params.get('ancestor', None)

        if ancestor is not None:
            try:
                ancestor = Build.objects.get(pk=ancestor)

                descendants = ancestor.get_descendants(include_self=True)

                queryset = queryset.filter(
                    parent__pk__in=[b.pk for b in descendants]
                )
            except (ValueError, Build.DoesNotExist):
                pass

        # Filter by 'date range'
        min_date = params.get('min_date', None)
        max_date = params.get('max_date', None)

        if min_date is not None and max_date is not None:
            queryset = Build.filterByDate(queryset, min_date, max_date)

        return queryset

    def get_serializer(self, *args, **kwargs):
        """Add extra context information to the endpoint serializer."""
        try:
            part_detail = str2bool(self.request.GET.get('part_detail', None))
        except AttributeError:
            part_detail = None

        kwargs['part_detail'] = part_detail

        return self.serializer_class(*args, **kwargs)
class BuildDetail(RetrieveUpdateDestroyAPI):
    """API endpoint for detail view of a Build object."""

    queryset = Build.objects.all()
    serializer_class = build.serializers.BuildSerializer

    def destroy(self, request, *args, **kwargs):
        """Only allow deletion of a BuildOrder if the build status is CANCELLED.

        Raises:
            ValidationError: If the build order is not in the CANCELLED state.
        """
        # NOTE: local renamed from 'build' to avoid shadowing the
        # module-level 'build' import
        build_order = self.get_object()

        if build_order.status != BuildStatus.CANCELLED:
            raise ValidationError({
                "non_field_errors": [_("Build must be cancelled before it can be deleted")]
            })

        return super().destroy(request, *args, **kwargs)
class BuildUnallocate(CreateAPI):
    """API endpoint for unallocating stock items from a build order.

    - The BuildOrder object is specified by the URL
    - "output" (StockItem) can optionally be specified
    - "bom_item" can optionally be specified
    """

    queryset = Build.objects.none()

    serializer_class = build.serializers.BuildUnallocationSerializer

    def get_serializer_context(self):
        """Add extra context information to the endpoint serializer."""
        ctx = super().get_serializer_context()

        try:
            ctx['build'] = Build.objects.get(pk=self.kwargs.get('pk', None))
        except Exception:
            # Invalid or missing pk - the serializer handles the absent 'build' key
            pass

        ctx['request'] = self.request

        return ctx
class BuildLineFilter(rest_filters.FilterSet):
    """Custom filterset for the BuildLine API endpoint."""

    class Meta:
        """Meta information for the BuildLineFilter class."""

        model = BuildLine
        fields = [
            'build',
            'bom_item',
        ]

    # Fields on related models
    consumable = rest_filters.BooleanFilter(label=_('Consumable'), field_name='bom_item__consumable')
    optional = rest_filters.BooleanFilter(label=_('Optional'), field_name='bom_item__optional')
    tracked = rest_filters.BooleanFilter(label=_('Tracked'), field_name='bom_item__sub_part__trackable')

    allocated = rest_filters.BooleanFilter(label=_('Allocated'), method='filter_allocated')

    def filter_allocated(self, queryset, name, value):
        """Filter by whether each BuildLine is fully allocated"""
        # Relies on the 'allocated' annotation added in the queryset
        if str2bool(value):
            return queryset.filter(allocated__gte=F('quantity'))
        return queryset.filter(allocated__lt=F('quantity'))

    available = rest_filters.BooleanFilter(label=_('Available'), method='filter_available')

    def filter_available(self, queryset, name, value):
        """Filter by whether there is sufficient stock available for each BuildLine:

        To determine this, we need to know:

        - The quantity required for each BuildLine
        - The quantity available for each BuildLine
        - The quantity allocated for each BuildLine
        """
        flt = Q(quantity__lte=F('total_available_stock') + F('allocated'))

        if str2bool(value):
            return queryset.filter(flt)
        return queryset.exclude(flt)
class BuildLineEndpoint:
    """Mixin class for BuildLine API endpoints."""

    queryset = BuildLine.objects.all()
    serializer_class = build.serializers.BuildLineSerializer

    def get_source_build(self) -> Build:
        """Return the source Build object for the BuildLine queryset.

        This source build is used to filter the available stock for each BuildLine.

        - If this is a "detail" view, use the build associated with the line
        - If this is a "list" view, use the build associated with the request
        """
        raise NotImplementedError("get_source_build must be implemented in the child class")

    def get_queryset(self):
        """Override queryset to select-related and annotate"""
        queryset = super().get_queryset()

        # Annotations depend on the source build (may be None)
        source_build = self.get_source_build()

        queryset = build.serializers.BuildLineSerializer.annotate_queryset(queryset, build=source_build)

        return queryset
class BuildLineList(BuildLineEndpoint, ListCreateAPI):
    """API endpoint for accessing a list of BuildLine objects"""

    filterset_class = BuildLineFilter
    filter_backends = SEARCH_ORDER_FILTER_ALIAS

    ordering_fields = [
        'part',
        'allocated',
        'reference',
        'quantity',
        'consumable',
        'optional',
        'unit_quantity',
        'available_stock',
    ]

    ordering_field_aliases = {
        'part': 'bom_item__sub_part__name',
        'reference': 'bom_item__reference',
        'unit_quantity': 'bom_item__quantity',
        'consumable': 'bom_item__consumable',
        'optional': 'bom_item__optional',
    }

    search_fields = [
        'bom_item__sub_part__name',
        'bom_item__reference',
    ]

    def get_source_build(self) -> Build:
        """Return the target build for the BuildLine queryset.

        Returns None if no valid 'build' query parameter is provided.
        """
        try:
            build_id = self.request.query_params.get('build', None)

            if build_id:
                # NOTE: return directly, rather than assigning to a local
                # named 'build' which shadowed the module-level import
                return Build.objects.get(pk=build_id)
        except (Build.DoesNotExist, AttributeError, ValueError):
            pass

        return None
class BuildLineDetail(BuildLineEndpoint, RetrieveUpdateDestroyAPI):
    """API endpoint for detail view of a BuildLine object."""

    def get_source_build(self) -> Build:
        """Return the target source location for the BuildLine queryset."""
        # Detail view does not annotate against a specific build
        return None
class BuildOrderContextMixin:
    """Mixin class which adds build order as serializer context variable."""

    def get_serializer_context(self):
        """Add extra context information to the endpoint serializer."""
        ctx = super().get_serializer_context()

        ctx['request'] = self.request
        # Default to completion mode; subclasses override as needed
        ctx['to_complete'] = True

        try:
            ctx['build'] = Build.objects.get(pk=self.kwargs.get('pk', None))
        except Exception:
            # Invalid or missing pk - the serializer handles the absent 'build' key
            pass

        return ctx
class BuildOutputCreate(BuildOrderContextMixin, CreateAPI):
    """API endpoint for creating new build output(s)."""

    serializer_class = build.serializers.BuildOutputCreateSerializer

    # No list/detail access - this endpoint only accepts POST
    queryset = Build.objects.none()
class BuildOutputScrap(BuildOrderContextMixin, CreateAPI):
    """API endpoint for scrapping build output(s)."""

    serializer_class = build.serializers.BuildOutputScrapSerializer
    queryset = Build.objects.none()

    def get_serializer_context(self):
        """Add extra context information to the endpoint serializer.

        Scrapped outputs are never marked as 'complete'.
        """
        ctx = super().get_serializer_context()
        ctx['to_complete'] = False
        return ctx
class BuildOutputComplete(BuildOrderContextMixin, CreateAPI):
    """API endpoint for completing build outputs."""

    serializer_class = build.serializers.BuildOutputCompleteSerializer

    # POST-only endpoint - no queryset exposed
    queryset = Build.objects.none()
class BuildOutputDelete(BuildOrderContextMixin, CreateAPI):
    """API endpoint for deleting multiple build outputs."""

    # Class attributes declared before methods, consistent with the
    # sibling endpoints (e.g. BuildOutputScrap)
    queryset = Build.objects.none()
    serializer_class = build.serializers.BuildOutputDeleteSerializer

    def get_serializer_context(self):
        """Add extra context information to the endpoint serializer.

        Deleted outputs are never marked as 'complete'.
        """
        ctx = super().get_serializer_context()
        ctx['to_complete'] = False
        return ctx
class BuildFinish(BuildOrderContextMixin, CreateAPI):
    """API endpoint for marking a build as finished (completed)."""

    serializer_class = build.serializers.BuildCompleteSerializer
    queryset = Build.objects.none()
class BuildAutoAllocate(BuildOrderContextMixin, CreateAPI):
    """API endpoint for 'automatically' allocating stock against a build order.

    - Only looks at 'untracked' parts
    - If stock exists in a single location, easy!
    - If user decides that stock items are "fungible", allocate against multiple stock items
    - If the user wants to, allocate substitute parts if the primary parts are not available.
    """

    queryset = Build.objects.none()

    serializer_class = build.serializers.BuildAutoAllocationSerializer
class BuildAllocate(BuildOrderContextMixin, CreateAPI):
    """API endpoint to allocate stock items to a build order.

    - The BuildOrder object is specified by the URL
    - Items to allocate are specified as a list called "items" with the following options:
        - bom_item: pk value of a given BomItem object (must match the part associated with this build)
        - stock_item: pk value of a given StockItem object
        - quantity: quantity to allocate
        - output: StockItem (build order output) to allocate stock against (optional)
    """

    # POST-only endpoint - validation is handled entirely by the serializer
    queryset = Build.objects.none()

    serializer_class = build.serializers.BuildAllocationSerializer
class BuildCancel(BuildOrderContextMixin, CreateAPI):
    """API endpoint for cancelling a BuildOrder."""

    serializer_class = build.serializers.BuildCancelSerializer
    queryset = Build.objects.all()
class BuildItemDetail(RetrieveUpdateDestroyAPI):
    """API endpoint for detail view of a BuildItem object."""

    serializer_class = build.serializers.BuildItemSerializer
    queryset = BuildItem.objects.all()
class BuildItemFilter(rest_filters.FilterSet):
    """Custom filterset for the BuildItemList API endpoint."""

    # Filter by the part of the allocated stock item
    part = rest_filters.ModelChoiceFilter(
        queryset=part.models.Part.objects.all(),
        field_name='stock_item__part',
    )

    # Filter by the build order the allocation belongs to
    build = rest_filters.ModelChoiceFilter(
        queryset=build.models.Build.objects.all(),
        field_name='build_line__build',
    )

    tracked = rest_filters.BooleanFilter(label='Tracked', method='filter_tracked')

    class Meta:
        """Metaclass options."""

        model = BuildItem
        fields = [
            'build_line',
            'stock_item',
            'install_into',
        ]

    def filter_tracked(self, queryset, name, value):
        """Filter the queryset based on whether build items are tracked.

        An item is 'tracked' when it is installed into a build output.
        """
        if str2bool(value):
            return queryset.exclude(install_into=None)

        return queryset.filter(install_into=None)
class BuildItemList(ListCreateAPI):
    """API endpoint for accessing a list of BuildItem objects.

    - GET: Return list of objects
    - POST: Create a new BuildItem object
    """

    serializer_class = build.serializers.BuildItemSerializer
    filterset_class = BuildItemFilter

    # Declared with the other class attributes (was previously stranded
    # below the method definitions)
    filter_backends = [
        DjangoFilterBackend,
    ]

    def get_serializer(self, *args, **kwargs):
        """Returns a BuildItemSerializer instance based on the request.

        Optional boolean query parameters ('part_detail', 'location_detail',
        'stock_detail', 'build_detail') toggle extra detail fields on the
        serializer.
        """
        try:
            params = self.request.query_params

            for key in ['part_detail', 'location_detail', 'stock_detail', 'build_detail']:
                if key in params:
                    kwargs[key] = str2bool(params.get(key, False))
        except AttributeError:
            # No request available (e.g. schema generation)
            pass

        # Delegate to the parent implementation, which also injects the
        # serializer context (request, view, format) - the previous direct
        # self.serializer_class(...) call dropped the context entirely
        return super().get_serializer(*args, **kwargs)

    def get_queryset(self):
        """Override the queryset method, to allow filtering by stock_item.part."""
        queryset = BuildItem.objects.all()

        # Pre-fetch related data to avoid N+1 queries during serialization
        queryset = queryset.select_related(
            'build_line',
            'build_line__build',
            'install_into',
            'stock_item',
            'stock_item__location',
            'stock_item__part',
        )

        return queryset

    def filter_queryset(self, queryset):
        """Custom query filtering for the BuildItem list."""
        queryset = super().filter_queryset(queryset)

        params = self.request.query_params

        # Filter by output target ('null' selects untracked allocations)
        output = params.get('output', None)

        if output:
            if isNull(output):
                queryset = queryset.filter(install_into=None)
            else:
                queryset = queryset.filter(install_into=output)

        return queryset
class BuildAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
    """API endpoint for listing (and creating) BuildOrderAttachment objects."""

    serializer_class = build.serializers.BuildAttachmentSerializer
    queryset = BuildOrderAttachment.objects.all()

    # Allow filtering against the parent build order
    filterset_fields = [
        'build',
    ]
class BuildAttachmentDetail(AttachmentMixin, RetrieveUpdateDestroyAPI):
    """Detail endpoint for a BuildOrderAttachment object."""

    serializer_class = build.serializers.BuildAttachmentSerializer
    queryset = BuildOrderAttachment.objects.all()
# URL patterns for the Build API.
# NOTE: ordering matters for overlapping prefixes - literal paths such as
# 'status/' are listed after '<int:pk>/' but cannot collide with it, since
# the int converter will not match non-numeric segments.
build_api_urls = [
    # Attachments
    path('attachment/', include([
        path('<int:pk>/', BuildAttachmentDetail.as_view(), name='api-build-attachment-detail'),
        path('', BuildAttachmentList.as_view(), name='api-build-attachment-list'),
    ])),

    # Build lines
    path('line/', include([
        path('<int:pk>/', BuildLineDetail.as_view(), name='api-build-line-detail'),
        path('', BuildLineList.as_view(), name='api-build-line-list'),
    ])),

    # Build Items
    path('item/', include([
        path('<int:pk>/', include([
            path('metadata/', MetadataView.as_view(), {'model': BuildItem}, name='api-build-item-metadata'),
            path('', BuildItemDetail.as_view(), name='api-build-item-detail'),
        ])),
        path('', BuildItemList.as_view(), name='api-build-item-list'),
    ])),

    # Build Detail
    path('<int:pk>/', include([
        path('allocate/', BuildAllocate.as_view(), name='api-build-allocate'),
        path('auto-allocate/', BuildAutoAllocate.as_view(), name='api-build-auto-allocate'),
        path('complete/', BuildOutputComplete.as_view(), name='api-build-output-complete'),
        path('create-output/', BuildOutputCreate.as_view(), name='api-build-output-create'),
        path('delete-outputs/', BuildOutputDelete.as_view(), name='api-build-output-delete'),
        path('scrap-outputs/', BuildOutputScrap.as_view(), name='api-build-output-scrap'),
        path('finish/', BuildFinish.as_view(), name='api-build-finish'),
        path('cancel/', BuildCancel.as_view(), name='api-build-cancel'),
        path('unallocate/', BuildUnallocate.as_view(), name='api-build-unallocate'),
        path('metadata/', MetadataView.as_view(), {'model': Build}, name='api-build-metadata'),
        path('', BuildDetail.as_view(), name='api-build-detail'),
    ])),

    # Build order status code information
    path('status/', StatusView.as_view(), {StatusView.MODEL_REF: BuildStatus}, name='api-build-status-codes'),

    # Build List
    path('', BuildList.as_view(), name='api-build-list'),
]

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,12 +0,0 @@
"""States are used to track the logical state of an object.
The logic value of a state is stored in the database as an integer. The logic value is used for business logic and should not be easily changed therefore.
There is a rendered state for each state value. The rendered state is used for display purposes and can be changed easily.
States can be extended with custom options for each InvenTree instance - those options are stored in the database and need to link back to state values.
"""
from .states import StatusCode
from .transition import StateTransitionMixin, TransitionMethod, storage
__all__ = [StatusCode, storage, TransitionMethod, StateTransitionMixin]

View File

@ -1,306 +0,0 @@
"""Unit tests for label API."""
import json
from io import StringIO
from django.core.cache import cache
from django.urls import reverse
import label.models as label_models
from build.models import BuildLine
from InvenTree.unit_test import InvenTreeAPITestCase
from part.models import Part
from stock.models import StockItem, StockLocation
class LabelTest(InvenTreeAPITestCase):
    """Base class for unit testing label model API endpoints.

    Subclasses set the model / URL-name attributes below; tests short-circuit
    (return early) when the corresponding attribute is left as None, so this
    base class itself runs as a no-op.
    """

    fixtures = ['category', 'part', 'location', 'stock', 'bom', 'build']

    superuser = True

    # Configured by each subclass
    model = None
    list_url = None
    detail_url = None
    metadata_url = None
    print_url = None
    print_itemname = None
    print_itemmodel = None

    def setUp(self):
        """Ensure cache is cleared as part of test setup."""
        cache.clear()
        return super().setUp()

    def test_api_url(self):
        """Test returned API Url against URL tag defined in this file."""
        if not self.list_url:
            return

        self.assertEqual(reverse(self.list_url), self.model.get_api_url())

    def test_list_endpoint(self):
        """Test that the LIST endpoint works for each model."""
        if not self.list_url:
            return

        url = reverse(self.list_url)

        response = self.get(url)
        self.assertEqual(response.status_code, 200)

        labels = self.model.objects.all()
        n = len(labels)

        # API endpoint must return correct number of reports
        self.assertEqual(len(response.data), n)

        # Filter by "enabled" status
        response = self.get(url, {'enabled': True})
        self.assertEqual(len(response.data), n)

        response = self.get(url, {'enabled': False})
        self.assertEqual(len(response.data), 0)

        # Disable each report
        for label in labels:
            label.enabled = False
            label.save()

        # Filter by "enabled" status
        response = self.get(url, {'enabled': True})
        self.assertEqual(len(response.data), 0)

        response = self.get(url, {'enabled': False})
        self.assertEqual(len(response.data), n)

    def test_create_endpoint(self):
        """Test that creating a new report works for each label."""
        if not self.list_url:
            return

        url = reverse(self.list_url)

        # Create a new label
        # Django REST API "APITestCase" does not work like requests - to send a file without it existing on disk,
        # create it as a StringIO object, and upload it under parameter template
        filestr = StringIO(
            '{% extends "label/label_base.html" %}{% block content %}<pre>TEST LABEL</pre>{% endblock content %}'
        )
        filestr.name = 'ExampleTemplate.html'

        response = self.post(
            url,
            data={
                'name': 'New label',
                'description': 'A fancy new label created through API test',
                'label': filestr,
            },
            format=None,
            expected_code=201,
        )

        # Make sure the expected keys are in the response
        self.assertIn('pk', response.data)
        self.assertIn('name', response.data)
        self.assertIn('description', response.data)
        self.assertIn('label', response.data)
        self.assertIn('filters', response.data)
        self.assertIn('enabled', response.data)

        self.assertEqual(response.data['name'], 'New label')
        self.assertEqual(
            response.data['description'], 'A fancy new label created through API test'
        )
        self.assertEqual(response.data['label'].count('ExampleTemplate'), 1)

    def test_detail_endpoint(self):
        """Test that the DETAIL endpoint works for each label."""
        if not self.detail_url:
            return

        # Create an item first
        self.test_create_endpoint()

        labels = self.model.objects.all()

        n = len(labels)

        # Make sure at least one report defined
        self.assertGreaterEqual(n, 1)

        # Check detail page for first report
        response = self.get(
            reverse(self.detail_url, kwargs={'pk': labels[0].pk}), expected_code=200
        )

        # Make sure the expected keys are in the response
        self.assertIn('pk', response.data)
        self.assertIn('name', response.data)
        self.assertIn('description', response.data)
        self.assertIn('label', response.data)
        self.assertIn('filters', response.data)
        self.assertIn('enabled', response.data)

        filestr = StringIO(
            '{% extends "label/label_base.html" %}{% block content %}<pre>TEST LABEL</pre>{% endblock content %}'
        )
        filestr.name = 'ExampleTemplate_Updated.html'

        # Check PATCH method
        response = self.patch(
            reverse(self.detail_url, kwargs={'pk': labels[0].pk}),
            {
                'name': 'Changed name during test',
                'description': 'New version of the template',
                'label': filestr,
            },
            format=None,
            expected_code=200,
        )

        # Make sure the expected keys are in the response
        self.assertIn('pk', response.data)
        self.assertIn('name', response.data)
        self.assertIn('description', response.data)
        self.assertIn('label', response.data)
        self.assertIn('filters', response.data)
        self.assertIn('enabled', response.data)

        self.assertEqual(response.data['name'], 'Changed name during test')
        self.assertEqual(response.data['description'], 'New version of the template')
        self.assertEqual(response.data['label'].count('ExampleTemplate_Updated'), 1)

    def test_delete(self):
        """Test deleting, after other test are done."""
        if not self.detail_url:
            return

        # Create an item first
        self.test_create_endpoint()

        labels = self.model.objects.all()
        n = len(labels)

        # Make sure at least one label defined
        self.assertGreaterEqual(n, 1)

        # Delete the last report (return value unused)
        self.delete(
            reverse(self.detail_url, kwargs={'pk': labels[n - 1].pk}), expected_code=204
        )

    def test_print_label(self):
        """Test printing a label."""
        if not self.print_url:
            return

        # Create an item first
        self.test_create_endpoint()

        labels = self.model.objects.all()
        n = len(labels)

        # Make sure at least one label defined
        self.assertGreaterEqual(n, 1)

        url = reverse(self.print_url, kwargs={'pk': labels[0].pk})

        # Try to print without providing a valid item
        self.get(url, expected_code=400)

        # Try to print with an invalid item
        self.get(url, {self.print_itemname: 9999}, expected_code=400)

        # Now print with a valid item
        # (leftover debug print() statements removed here)
        item = self.print_itemmodel.objects.first()
        self.assertIsNotNone(item)

        response = self.get(url, {self.print_itemname: item.pk}, expected_code=200)

        response_json = json.loads(response.content.decode('utf-8'))

        self.assertIn('file', response_json)
        self.assertIn('success', response_json)
        self.assertIn('message', response_json)
        self.assertTrue(response_json['success'])

    def test_metadata_endpoint(self):
        """Unit tests for the metadata field."""
        if not self.metadata_url:
            return

        # Create an item first
        self.test_create_endpoint()

        labels = self.model.objects.all()
        n = len(labels)

        # Make sure at least one label defined
        self.assertGreaterEqual(n, 1)

        # Test getting metadata
        response = self.get(
            reverse(self.metadata_url, kwargs={'pk': labels[0].pk}), expected_code=200
        )

        self.assertEqual(response.data, {'metadata': {}})
class TestStockItemLabel(LabelTest):
    """Unit testing class for the StockItemLabel model."""

    model = label_models.StockItemLabel

    # URL names for the endpoints under test
    list_url = 'api-stockitem-label-list'
    detail_url = 'api-stockitem-label-detail'
    metadata_url = 'api-stockitem-label-metadata'
    print_url = 'api-stockitem-label-print'

    # Query parameter name and model used when printing a label
    print_itemname = 'item'
    print_itemmodel = StockItem
class TestStockLocationLabel(LabelTest):
    """Unit testing class for the StockLocationLabel model."""

    model = label_models.StockLocationLabel

    # URL names for the endpoints under test
    list_url = 'api-stocklocation-label-list'
    detail_url = 'api-stocklocation-label-detail'
    metadata_url = 'api-stocklocation-label-metadata'
    print_url = 'api-stocklocation-label-print'

    # Query parameter name and model used when printing a label
    print_itemname = 'location'
    print_itemmodel = StockLocation
class TestPartLabel(LabelTest):
    """Unit testing class for the PartLabel model."""

    model = label_models.PartLabel

    # URL names for the endpoints under test
    list_url = 'api-part-label-list'
    detail_url = 'api-part-label-detail'
    metadata_url = 'api-part-label-metadata'
    print_url = 'api-part-label-print'

    # Query parameter name and model used when printing a label
    print_itemname = 'part'
    print_itemmodel = Part
class TestBuildLineLabel(LabelTest):
    """Unit testing class for the BuildLine model."""

    model = label_models.BuildLineLabel

    # URL names for the endpoints under test
    list_url = 'api-buildline-label-list'
    detail_url = 'api-buildline-label-detail'
    metadata_url = 'api-buildline-label-metadata'
    print_url = 'api-buildline-label-print'

    # Query parameter name and model used when printing a label
    print_itemname = 'line'
    print_itemmodel = BuildLine

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,375 +0,0 @@
"""Base machine type/base driver."""
from typing import TYPE_CHECKING, Any, Literal, Union
from generic.states import StatusCode
from InvenTree.helpers_mixin import ClassProviderMixin, ClassValidationMixin
# Import only for typechecking, otherwise this throws cyclic import errors
if TYPE_CHECKING:
    from common.models import SettingsKeyType
    from machine.models import MachineConfig
else:  # pragma: no cover
    # Runtime placeholders: empty stand-in classes so that annotations
    # referencing these names still resolve without the real imports

    class MachineConfig:
        """Only used if not typechecking currently."""

    class SettingsKeyType:
        """Only used if not typechecking currently."""
class MachineStatus(StatusCode):
    """Base class for representing a set of machine status codes.

    Use enum syntax to define the status codes, e.g.
    ```python
    CONNECTED = 200, _("Connected"), 'success'
    ```

    The values of the status can be accessed with `MachineStatus.CONNECTED.value`.
    Additionally there are helpers to access all additional attributes `text`, `label`, `color`.

    Available colors:
        primary, secondary, warning, danger, success, info

    Status code ranges:
    ```
    1XX - Everything fine
    2XX - Warnings (e.g. ink is about to become empty)
    3XX - Something wrong with the machine (e.g. no labels are remaining on the spool)
    4XX - Something wrong with the driver (e.g. cannot connect to the machine)
    5XX - Unknown issues
    ```
    """
class BaseDriver(ClassValidationMixin, ClassProviderMixin):
    """Base class for all machine drivers.

    Attributes:
        SLUG: Slug string for identifying the driver in format /[a-z-]+/ (required)
        NAME: User friendly name for displaying (required)
        DESCRIPTION: Description of what this driver does (required)
        MACHINE_SETTINGS: Driver specific settings dict
    """

    SLUG: str
    NAME: str
    DESCRIPTION: str

    MACHINE_SETTINGS: dict[str, SettingsKeyType]

    # Slug of the machine type this driver belongs to
    machine_type: str

    # Validated by ClassValidationMixin - subclasses must define these
    required_attributes = ['SLUG', 'NAME', 'DESCRIPTION', 'machine_type']

    def __init__(self) -> None:
        """Base driver __init__ method."""
        super().__init__()

        # Accumulates errors raised during driver operation (see handle_error)
        self.errors: list[Union[str, Exception]] = []

    def init_driver(self):
        """This method gets called after all machines are created and can be used to initialize the driver.

        After the driver is initialized, the self.init_machine function is
        called for each machine associated with that driver.
        """

    def init_machine(self, machine: 'BaseMachineType'):
        """This method gets called for each active machine using that driver while initialization.

        If this function raises an Exception, it gets added to the machine.errors
        list and the machine does not initialize successfully.

        Arguments:
            machine: Machine instance
        """

    def update_machine(
        self, old_machine_state: dict[str, Any], machine: 'BaseMachineType'
    ):
        """This method gets called for each update of a machine.

        Note:
            machine.restart_required can be set to True here if the machine needs a manual restart to apply the changes

        Arguments:
            old_machine_state: Dict holding the old machine state before update
            machine: Machine instance with the new state
        """

    def restart_machine(self, machine: 'BaseMachineType'):
        """This method gets called on manual machine restart e.g. by using the restart machine action in the Admin Center.

        Note:
            `machine.restart_required` gets set to False again before this function is called

        Arguments:
            machine: Machine instance
        """

    def get_machines(self, **kwargs):
        """Return all machines using this driver (By default only initialized machines).

        Keyword Arguments:
            name (str): Machine name
            machine_type (BaseMachineType): Machine type definition (class)
            initialized (bool | None): use None to get all machines (default: True)
            active (bool): machine needs to be active
            base_driver (BaseDriver): base driver (class)
        """
        # Local import to avoid a circular dependency with the machine registry
        from machine import registry

        # This driver instance is always the filter - discard any caller-supplied value
        kwargs.pop('driver', None)

        return registry.get_machines(driver=self, **kwargs)

    def handle_error(self, error: Union[Exception, str]):
        """Handle driver error.

        Arguments:
            error: Exception or string
        """
        self.errors.append(error)
class BaseMachineType(ClassValidationMixin, ClassProviderMixin):
    """Base class for machine types.

    Attributes:
        SLUG: Slug string for identifying the machine type in format /[a-z-]+/ (required)
        NAME: User friendly name for displaying (required)
        DESCRIPTION: Description of what this machine type can do (required)

        base_driver: Reference to the base driver for this machine type

        MACHINE_SETTINGS: Machine type specific settings dict (optional)

        MACHINE_STATUS: Set of status codes this machine type can have
        default_machine_status: Default machine status with which this machine gets initialized
    """

    SLUG: str
    NAME: str
    DESCRIPTION: str

    base_driver: type[BaseDriver]

    MACHINE_SETTINGS: dict[str, SettingsKeyType]

    MACHINE_STATUS: type[MachineStatus]
    default_machine_status: MachineStatus

    # used by the ClassValidationMixin
    required_attributes = [
        'SLUG',
        'NAME',
        'DESCRIPTION',
        'base_driver',
        'MACHINE_STATUS',
        'default_machine_status',
    ]

    def __init__(self, machine_config: MachineConfig) -> None:
        """Base machine type __init__ function."""
        # Local imports to avoid circular dependencies
        from machine import registry
        from machine.models import MachineSetting

        self.errors: list[Union[str, Exception]] = []
        self.initialized = False

        self.status = self.default_machine_status
        self.status_text: str = ''

        # Only the pk is stored; machine_config is re-fetched on access
        self.pk = machine_config.pk
        self.driver = registry.get_driver_instance(machine_config.driver)

        if not self.driver:
            self.handle_error(f"Driver '{machine_config.driver}' not found")
        if self.driver and not isinstance(self.driver, self.base_driver):
            self.handle_error(
                f"'{self.driver.NAME}' is incompatible with machine type '{self.NAME}'"
            )

        self.machine_settings: dict[str, SettingsKeyType] = getattr(
            self, 'MACHINE_SETTINGS', {}
        )
        self.driver_settings: dict[str, SettingsKeyType] = getattr(
            self.driver, 'MACHINE_SETTINGS', {}
        )

        # Pairs of (settings definition, config scope) used by check_settings()
        self.setting_types: list[
            tuple[dict[str, SettingsKeyType], MachineSetting.ConfigType]
        ] = [
            (self.machine_settings, MachineSetting.ConfigType.MACHINE),
            (self.driver_settings, MachineSetting.ConfigType.DRIVER),
        ]

        self.restart_required = False

    def __str__(self):
        """String representation of a machine."""
        return f'{self.name}'

    def __repr__(self):
        """Python representation of a machine."""
        return f'<{self.__class__.__name__}: {self.name}>'

    # --- properties
    @property
    def machine_config(self):
        """Machine_config property which is a reference to the database entry."""
        # always fetch the machine_config if needed to ensure we get the newest reference
        from .models import MachineConfig

        return MachineConfig.objects.get(pk=self.pk)

    @property
    def name(self):
        """The machines name."""
        return self.machine_config.name

    @property
    def active(self):
        """The machines active status."""
        return self.machine_config.active

    # --- hook functions
    def initialize(self):
        """Machine initialization function, gets called after all machines are loaded."""
        if self.driver is None:
            return

        # check if all required settings are defined before continue with init process
        settings_valid, missing_settings = self.check_settings()
        if not settings_valid:
            error_parts = []
            for config_type, missing in missing_settings.items():
                if len(missing) > 0:
                    error_parts.append(
                        f'{config_type.name} settings: ' + ', '.join(missing)
                    )
            self.handle_error(f"Missing {' and '.join(error_parts)}")
            return

        try:
            self.driver.init_machine(self)
            self.initialized = True
        except Exception as e:
            # Driver init errors are captured, not propagated
            self.handle_error(e)

    def update(self, old_state: dict[str, Any]):
        """Machine update function, gets called if the machine itself changes or their settings.

        Arguments:
            old_state: Dict holding the old machine state before update
        """
        if self.driver is None:
            return

        try:
            self.driver.update_machine(old_state, self)
        except Exception as e:
            self.handle_error(e)

    def restart(self):
        """Machine restart function, can be used to manually restart the machine from the admin ui."""
        if self.driver is None:
            return

        try:
            # Clear the flag before the driver hook runs (per BaseDriver docs)
            self.restart_required = False
            self.driver.restart_machine(self)
        except Exception as e:
            self.handle_error(e)

    # --- helper functions
    def handle_error(self, error: Union[Exception, str]):
        """Helper function for capturing errors with the machine.

        Arguments:
            error: Exception or string
        """
        self.errors.append(error)

    def get_setting(
        self, key: str, config_type_str: Literal['M', 'D'], cache: bool = False
    ):
        """Return the 'value' of the setting associated with this machine.

        Arguments:
            key: The 'name' of the setting value to be retrieved
            config_type_str: Either "M" (machine scoped settings) or "D" (driver scoped settings)
            cache: Whether to use RAM cached value (default = False)
        """
        from machine.models import MachineSetting

        config_type = MachineSetting.get_config_type(config_type_str)
        return MachineSetting.get_setting(
            key,
            machine_config=self.machine_config,
            config_type=config_type,
            cache=cache,
        )

    def set_setting(self, key: str, config_type_str: Literal['M', 'D'], value: Any):
        """Set plugin setting value by key.

        Arguments:
            key: The 'name' of the setting to set
            config_type_str: Either "M" (machine scoped settings) or "D" (driver scoped settings)
            value: The 'value' of the setting
        """
        from machine.models import MachineSetting

        config_type = MachineSetting.get_config_type(config_type_str)
        MachineSetting.set_setting(
            key,
            value,
            None,
            machine_config=self.machine_config,
            config_type=config_type,
        )

    def check_settings(self):
        """Check if all required settings for this machine are defined.

        Returns:
            is_valid: Are all required settings defined
            missing_settings: dict[ConfigType, list[str]] of all settings that are missing (empty if is_valid is 'True')
        """
        from machine.models import MachineSetting

        missing_settings: dict[MachineSetting.ConfigType, list[str]] = {}
        for settings, config_type in self.setting_types:
            is_valid, missing = MachineSetting.check_all_settings(
                settings_definition=settings,
                machine_config=self.machine_config,
                config_type=config_type,
            )
            missing_settings[config_type] = missing

        return all(
            len(missing) == 0 for missing in missing_settings.values()
        ), missing_settings

    def set_status(self, status: MachineStatus):
        """Set the machine status code. There are predefined ones for each MachineType.

        Import the MachineType to access it's `MACHINE_STATUS` enum.

        Arguments:
            status: The new MachineStatus code to set
        """
        self.status = status

    def set_status_text(self, status_text: str):
        """Set the machine status text. It can be any arbitrary text.

        Arguments:
            status_text: The new status text to set
        """
        self.status_text = status_text

File diff suppressed because it is too large Load Diff

View File

@ -1,619 +0,0 @@
"""Admin class definitions for the 'part' app."""
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from import_export import widgets
from import_export.admin import ImportExportModelAdmin
from import_export.fields import Field
from company.models import SupplierPart
from InvenTree.admin import InvenTreeResource
from part import models
from stock.models import StockLocation
class PartResource(InvenTreeResource):
"""Class for managing Part data import/export."""
class Meta:
"""Metaclass options."""
model = models.Part
skip_unchanged = True
report_skipped = False
clean_model_instances = True
exclude = [
'bom_checksum',
'bom_checked_by',
'bom_checked_date',
'lft',
'rght',
'tree_id',
'level',
'metadata',
'barcode_data',
'barcode_hash',
]
id = Field(attribute='pk', column_name=_('Part ID'), widget=widgets.IntegerWidget())
name = Field(
attribute='name', column_name=_('Part Name'), widget=widgets.CharWidget()
)
description = Field(
attribute='description',
column_name=_('Part Description'),
widget=widgets.CharWidget(),
)
IPN = Field(attribute='IPN', column_name=_('IPN'), widget=widgets.CharWidget())
revision = Field(
attribute='revision', column_name=_('Revision'), widget=widgets.CharWidget()
)
keywords = Field(
attribute='keywords', column_name=_('Keywords'), widget=widgets.CharWidget()
)
link = Field(attribute='link', column_name=_('Link'), widget=widgets.CharWidget())
units = Field(
attribute='units', column_name=_('Units'), widget=widgets.CharWidget()
)
notes = Field(attribute='notes', column_name=_('Notes'))
image = Field(attribute='image', column_name=_('Part Image'))
category = Field(
attribute='category',
column_name=_('Category ID'),
widget=widgets.ForeignKeyWidget(models.PartCategory),
)
category_name = Field(
attribute='category__name', column_name=_('Category Name'), readonly=True
)
default_location = Field(
attribute='default_location',
column_name=_('Default Location ID'),
widget=widgets.ForeignKeyWidget(StockLocation),
)
default_supplier = Field(
attribute='default_supplier',
column_name=_('Default Supplier ID'),
widget=widgets.ForeignKeyWidget(SupplierPart),
)
variant_of = Field(
attribute='variant_of',
column_name=_('Variant Of'),
widget=widgets.ForeignKeyWidget(models.Part),
)
minimum_stock = Field(attribute='minimum_stock', column_name=_('Minimum Stock'))
# Part Attributes
active = Field(
attribute='active', column_name=_('Active'), widget=widgets.BooleanWidget()
)
assembly = Field(
attribute='assembly', column_name=_('Assembly'), widget=widgets.BooleanWidget()
)
component = Field(
attribute='component',
column_name=_('Component'),
widget=widgets.BooleanWidget(),
)
purchaseable = Field(
attribute='purchaseable',
column_name=_('Purchaseable'),
widget=widgets.BooleanWidget(),
)
salable = Field(
attribute='salable', column_name=_('Salable'), widget=widgets.BooleanWidget()
)
is_template = Field(
attribute='is_template',
column_name=_('Template'),
widget=widgets.BooleanWidget(),
)
trackable = Field(
attribute='trackable',
column_name=_('Trackable'),
widget=widgets.BooleanWidget(),
)
virtual = Field(
attribute='virtual', column_name=_('Virtual'), widget=widgets.BooleanWidget()
)
# Extra calculated meta-data (readonly)
suppliers = Field(
attribute='supplier_count', column_name=_('Suppliers'), readonly=True
)
in_stock = Field(
attribute='total_stock',
column_name=_('In Stock'),
readonly=True,
widget=widgets.IntegerWidget(),
)
on_order = Field(
attribute='on_order',
column_name=_('On Order'),
readonly=True,
widget=widgets.IntegerWidget(),
)
used_in = Field(
attribute='used_in_count',
column_name=_('Used In'),
readonly=True,
widget=widgets.IntegerWidget(),
)
allocated = Field(
attribute='allocation_count',
column_name=_('Allocated'),
readonly=True,
widget=widgets.IntegerWidget(),
)
building = Field(
attribute='quantity_being_built',
column_name=_('Building'),
readonly=True,
widget=widgets.IntegerWidget(),
)
min_cost = Field(
attribute='pricing__overall_min', column_name=_('Minimum Cost'), readonly=True
)
max_cost = Field(
attribute='pricing__overall_max', column_name=_('Maximum Cost'), readonly=True
)
def dehydrate_min_cost(self, part):
    """Render the minimum cost for this Part as a plain float.

    Returns None if no pricing information is available for the part.
    """
    pricing = part.pricing

    if not pricing or pricing.overall_min is None:
        return None

    # 'overall_min' is a Money object - export the raw amount
    return float(pricing.overall_min.amount)
def dehydrate_max_cost(self, part):
    """Render the maximum cost for this Part as a plain float.

    Returns None if no pricing information is available for the part.
    """
    pricing = part.pricing

    if not pricing or pricing.overall_max is None:
        return None

    # 'overall_max' is a Money object - export the raw amount
    return float(pricing.overall_max.amount)
def get_queryset(self):
    """Return the base queryset, with related data prefetched for quicker access."""
    # Relations touched by the calculated export fields - fetch them up front
    related = (
        'category',
        'used_in',
        'builds',
        'supplier_parts__purchase_order_line_items',
        'stock_items__allocations',
    )

    return super().get_queryset().prefetch_related(*related)
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
    """Rebuild MPTT tree structure after importing Part data."""
    super().after_import(dataset, result, using_transactions, dry_run, **kwargs)

    # Rebuild the Part tree(s)
    # Imported rows may carry stale lft/rght/tree_id values, so recompute them
    models.Part.objects.rebuild()
class PartImportResource(InvenTreeResource):
    """Class for managing Part data import/export.

    Reuses the PartResource field definitions, but excludes internal,
    calculated and MPTT bookkeeping columns which must not be imported.
    """

    class Meta(PartResource.Meta):
        """Metaclass options."""

        skip_unchanged = True
        report_skipped = False
        clean_model_instances = True

        # Fields stripped from the import template:
        # readonly/derived values, MPTT internals, and server-managed metadata
        exclude = [
            'id',
            'category__name',
            'creation_date',
            'creation_user',
            'pricing__overall_min',
            'pricing__overall_max',
            'bom_checksum',
            'bom_checked_by',
            'bom_checked_date',
            'lft',
            'rght',
            'tree_id',
            'level',
            'metadata',
            'barcode_data',
            'barcode_hash',
        ]
class PartParameterInline(admin.TabularInline):
    """Inline for part parameter data, shown on the Part admin page."""

    model = models.PartParameter
class PartAdmin(ImportExportModelAdmin):
    """Admin class for the Part model."""

    # Use the full (non-import) resource for admin import/export
    resource_class = PartResource

    list_display = ('full_name', 'description', 'total_stock', 'category')

    list_filter = ('active', 'assembly', 'is_template', 'virtual')

    search_fields = (
        'name',
        'description',
        'category__name',
        'category__description',
        'IPN',
    )

    # Use select2 autocomplete widgets for large foreign-key sets
    autocomplete_fields = [
        'variant_of',
        'category',
        'default_location',
        'default_supplier',
    ]

    inlines = [PartParameterInline]
class PartPricingAdmin(admin.ModelAdmin):
    """Admin class for PartPricing model."""

    list_display = ('part', 'overall_min', 'overall_max')

    # Bug fix: this attribute was previously misspelled as 'autcomplete_fields',
    # which Django silently ignored - the 'part' selector rendered as a plain
    # (and potentially huge) dropdown instead of an autocomplete widget.
    autocomplete_fields = ['part']
class PartStocktakeAdmin(admin.ModelAdmin):
    """Admin class for PartStocktake model."""

    list_display = ['part', 'date', 'quantity', 'user']
class PartStocktakeReportAdmin(admin.ModelAdmin):
    """Admin class for PartStocktakeReport model."""

    list_display = ['date', 'user']
class PartCategoryResource(InvenTreeResource):
    """Class for managing PartCategory data import/export."""

    class Meta:
        """Metaclass options."""

        model = models.PartCategory
        skip_unchanged = True
        report_skipped = False
        clean_model_instances = True

        exclude = [
            # Exclude MPTT internal model fields
            'lft',
            'rght',
            'tree_id',
            'level',
            'metadata',
            'icon',
        ]

    id = Field(
        attribute='pk', column_name=_('Category ID'), widget=widgets.IntegerWidget()
    )

    name = Field(attribute='name', column_name=_('Category Name'))

    description = Field(attribute='description', column_name=_('Description'))

    # Link to the parent category (by primary key)
    parent = Field(
        attribute='parent',
        column_name=_('Parent ID'),
        widget=widgets.ForeignKeyWidget(models.PartCategory),
    )

    parent_name = Field(
        attribute='parent__name', column_name=_('Parent Name'), readonly=True
    )

    default_location = Field(
        attribute='default_location',
        column_name=_('Default Location ID'),
        widget=widgets.ForeignKeyWidget(StockLocation),
    )

    default_keywords = Field(attribute='default_keywords', column_name=_('Keywords'))

    pathstring = Field(attribute='pathstring', column_name=_('Category Path'))

    # Calculated fields
    # Number of parts in this category (annotated value, export-only)
    parts = Field(
        attribute='item_count',
        column_name=_('Parts'),
        widget=widgets.IntegerWidget(),
        readonly=True,
    )

    def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
        """Rebuild MPTT tree structure after importing PartCategory data."""
        super().after_import(dataset, result, using_transactions, dry_run, **kwargs)

        # Rebuild the PartCategory tree(s)
        # Imported rows may carry stale lft/rght/tree_id values, so recompute them
        models.PartCategory.objects.rebuild()
class PartCategoryAdmin(ImportExportModelAdmin):
    """Admin class for the PartCategory model."""

    resource_class = PartCategoryResource

    list_display = ('name', 'pathstring', 'description')

    search_fields = ('name', 'description')

    autocomplete_fields = ('parent', 'default_location')
class PartRelatedAdmin(admin.ModelAdmin):
    """Class to manage PartRelated objects."""

    autocomplete_fields = ('part_1', 'part_2')
class PartAttachmentAdmin(admin.ModelAdmin):
    """Admin class for the PartAttachment model."""

    list_display = ('part', 'attachment', 'comment')

    autocomplete_fields = ('part',)
class PartTestTemplateAdmin(admin.ModelAdmin):
    """Admin class for the PartTestTemplate model."""

    list_display = ('part', 'test_name', 'required')

    # 'key' is generated from the test name - never edited directly
    readonly_fields = ['key']

    autocomplete_fields = ('part',)
class BomItemResource(InvenTreeResource):
    """Class for managing BomItem data import/export."""

    class Meta:
        """Metaclass options."""

        model = models.BomItem
        skip_unchanged = True
        report_skipped = False
        clean_model_instances = True

        exclude = ['checksum', 'id', 'part', 'sub_part', 'validated']

    level = Field(attribute='level', column_name=_('BOM Level'), readonly=True)

    bom_id = Field(
        attribute='pk', column_name=_('BOM Item ID'), widget=widgets.IntegerWidget()
    )

    # ID of the parent part
    parent_part_id = Field(
        attribute='part',
        column_name=_('Parent ID'),
        widget=widgets.ForeignKeyWidget(models.Part),
    )

    parent_part_ipn = Field(
        attribute='part__IPN', column_name=_('Parent IPN'), readonly=True
    )

    parent_part_name = Field(
        attribute='part__name', column_name=_('Parent Name'), readonly=True
    )

    # ID of the component (child) part
    part_id = Field(
        attribute='sub_part',
        column_name=_('Part ID'),
        widget=widgets.ForeignKeyWidget(models.Part),
    )

    part_ipn = Field(
        attribute='sub_part__IPN', column_name=_('Part IPN'), readonly=True
    )

    part_name = Field(
        attribute='sub_part__name', column_name=_('Part Name'), readonly=True
    )

    part_description = Field(
        attribute='sub_part__description', column_name=_('Description'), readonly=True
    )

    quantity = Field(attribute='quantity', column_name=_('Quantity'))

    reference = Field(attribute='reference', column_name=_('Reference'))

    note = Field(attribute='note', column_name=_('Note'))

    # Pricing columns - rendered via the dehydrate_min_cost / dehydrate_max_cost methods
    min_cost = Field(
        attribute='sub_part__pricing__overall_min',
        column_name=_('Minimum Price'),
        readonly=True,
    )

    max_cost = Field(
        attribute='sub_part__pricing__overall_max',
        column_name=_('Maximum Price'),
        readonly=True,
    )

    sub_assembly = Field(
        attribute='sub_part__assembly', column_name=_('Assembly'), readonly=True
    )

    def dehydrate_min_cost(self, item):
        """Render minimum cost value for the BOM line item.

        Returns the minimum unit price scaled by the line quantity.
        """
        min_price = item.sub_part.pricing.overall_min if item.sub_part.pricing else None

        if min_price is not None:
            return float(min_price.amount) * float(item.quantity)

    def dehydrate_max_cost(self, item):
        """Render maximum cost value for the BOM line item.

        Returns the maximum unit price scaled by the line quantity.
        """
        max_price = item.sub_part.pricing.overall_max if item.sub_part.pricing else None

        if max_price is not None:
            return float(max_price.amount) * float(item.quantity)

    def dehydrate_quantity(self, item):
        """Special consideration for the 'quantity' field on data export. We do not want a spreadsheet full of "1.0000" (we'd rather "1").

        Ref: https://django-import-export.readthedocs.io/en/latest/getting_started.html#advanced-data-manipulation-on-export
        """
        return float(item.quantity)

    def before_export(self, queryset, *args, **kwargs):
        """Perform before exporting data.

        Stores export flags on the instance, consumed later by get_fields().
        NOTE(review): 'importing' is read with .get() while 'include_pricing'
        is removed with .pop() - confirm whether that asymmetry is intentional.
        """
        self.is_importing = kwargs.get('importing', False)
        self.include_pricing = kwargs.pop('include_pricing', False)

    def get_fields(self, **kwargs):
        """If we are exporting for the purposes of generating a 'bom-import' template, there are some fields which we are not interested in."""
        fields = super().get_fields(**kwargs)

        # Flags set by before_export (default to False if export has not started)
        is_importing = getattr(self, 'is_importing', False)
        include_pricing = getattr(self, 'include_pricing', False)

        to_remove = ['metadata']

        if is_importing or not include_pricing:
            # Remove pricing fields in this instance
            to_remove += [
                'sub_part__pricing__overall_min',
                'sub_part__pricing__overall_max',
            ]

        if is_importing:
            # Read-only / derived columns make no sense on an import template
            to_remove += [
                'level',
                'pk',
                'part',
                'part__IPN',
                'part__name',
                'sub_part__name',
                'sub_part__description',
                'sub_part__assembly',
            ]

        # Filter the field list in place, preserving the original ordering
        idx = 0

        while idx < len(fields):
            if fields[idx].attribute in to_remove:
                del fields[idx]
            else:
                idx += 1

        return fields
class BomItemAdmin(ImportExportModelAdmin):
    """Admin class for the BomItem model."""

    resource_class = BomItemResource

    list_display = ('part', 'sub_part', 'quantity')

    search_fields = (
        'part__name',
        'part__description',
        'sub_part__name',
        'sub_part__description',
    )

    autocomplete_fields = ('part', 'sub_part')
class ParameterTemplateResource(InvenTreeResource):
    """Class for managing ParameterTemplate import/export."""

    # The following fields will be converted from None to ''
    CONVERT_NULL_FIELDS = ['choices', 'units']

    class Meta:
        """Metaclass options."""

        model = models.PartParameterTemplate
        skip_unchanged = True
        report_skipped = False
        clean_model_instances = True

        exclude = ['metadata']
class ParameterTemplateAdmin(ImportExportModelAdmin):
    """Admin class for the PartParameterTemplate model."""

    resource_class = ParameterTemplateResource

    list_display = ('name', 'units')

    search_fields = ('name', 'units')
class ParameterResource(InvenTreeResource):
    """Class for managing PartParameter data import/export."""

    class Meta:
        """Metaclass options."""

        model = models.PartParameter
        skip_unchanged = True
        report_skipped = False
        # Bug fix: this option was previously misspelled 'clean_model_instance'
        # (missing trailing 's'), so django-import-export silently ignored it
        # and instance validation was skipped on import. Now consistent with
        # the other Resource classes in this module.
        clean_model_instances = True

    part = Field(attribute='part', widget=widgets.ForeignKeyWidget(models.Part))

    part_name = Field(attribute='part__name', readonly=True)

    template = Field(
        attribute='template',
        widget=widgets.ForeignKeyWidget(models.PartParameterTemplate),
    )

    template_name = Field(attribute='template__name', readonly=True)
class ParameterAdmin(ImportExportModelAdmin):
    """Admin class for the PartParameter model."""

    resource_class = ParameterResource

    list_display = ('part', 'template', 'data')

    autocomplete_fields = ('part', 'template')
class PartCategoryParameterAdmin(admin.ModelAdmin):
    """Admin class for the PartCategoryParameterTemplate model."""

    autocomplete_fields = ('category', 'parameter_template')
class PartSellPriceBreakAdmin(admin.ModelAdmin):
    """Admin class for the PartSellPriceBreak model."""

    # NOTE(review): ModelAdmin does not consume an inner Meta class - this
    # appears to have no effect, and the model is bound via admin.site.register.
    # Also note the sibling PartInternalPriceBreakAdmin declares
    # autocomplete_fields for 'part' while this class does not - confirm
    # whether that inconsistency is intentional.
    class Meta:
        """Metaclass options."""

        model = models.PartSellPriceBreak

    list_display = ('part', 'quantity', 'price')
class PartInternalPriceBreakAdmin(admin.ModelAdmin):
    """Admin class for the PartInternalPriceBreak model."""

    # NOTE(review): ModelAdmin does not consume an inner Meta class - this
    # appears to have no effect, and the model is bound via admin.site.register.
    class Meta:
        """Metaclass options."""

        model = models.PartInternalPriceBreak

    list_display = ('part', 'quantity', 'price')

    autocomplete_fields = ('part',)
# Register each model against its admin class with the default admin site
admin.site.register(models.Part, PartAdmin)
admin.site.register(models.PartCategory, PartCategoryAdmin)
admin.site.register(models.PartRelated, PartRelatedAdmin)
admin.site.register(models.PartAttachment, PartAttachmentAdmin)
admin.site.register(models.BomItem, BomItemAdmin)
admin.site.register(models.PartParameterTemplate, ParameterTemplateAdmin)
admin.site.register(models.PartParameter, ParameterAdmin)
admin.site.register(models.PartCategoryParameterTemplate, PartCategoryParameterAdmin)
admin.site.register(models.PartTestTemplate, PartTestTemplateAdmin)
admin.site.register(models.PartSellPriceBreak, PartSellPriceBreakAdmin)
admin.site.register(models.PartInternalPriceBreak, PartInternalPriceBreakAdmin)
admin.site.register(models.PartPricing, PartPricingAdmin)
admin.site.register(models.PartStocktake, PartStocktakeAdmin)
admin.site.register(models.PartStocktakeReport, PartStocktakeReportAdmin)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,481 +0,0 @@
"""Plugin mixin classes for barcode plugin."""
from __future__ import annotations
import logging
from decimal import Decimal, InvalidOperation
from django.contrib.auth.models import User
from django.db.models import F, Q
from django.utils.translation import gettext_lazy as _
from company.models import Company, SupplierPart
from order.models import PurchaseOrder, PurchaseOrderStatus
from plugin.base.integration.SettingsMixin import SettingsMixin
from stock.models import StockLocation
logger = logging.getLogger('inventree')
class BarcodeMixin:
    """Mixin that enables barcode handling.

    Custom barcode plugins should use and extend this mixin as necessary.
    """

    ACTION_NAME = ''

    class MixinMeta:
        """Meta options for this mixin."""

        MIXIN_NAME = 'Barcode'

    def __init__(self):
        """Register mixin."""
        super().__init__()
        self.add_mixin('barcode', 'has_barcode', __class__)

    @property
    def has_barcode(self):
        """Does this plugin have everything needed to process a barcode."""
        return True

    def scan(self, barcode_data):
        """Scan a barcode against this plugin.

        This method is explicitly called from the /scan/ API endpoint,
        and thus it is expected that any barcode which matches this barcode will return a result.

        If this plugin finds a match against the provided barcode, it should return a dict object
        with the intended result.

        Default return value is None
        """
        return None
class SupplierBarcodeMixin(BarcodeMixin):
    """Mixin that provides default implementations for scan functions for supplier barcodes.

    Custom supplier barcode plugins should use this mixin and implement the
    extract_barcode_fields function.
    """

    # Set of standard field names which can be extracted from the barcode
    CUSTOMER_ORDER_NUMBER = 'customer_order_number'
    SUPPLIER_ORDER_NUMBER = 'supplier_order_number'
    PACKING_LIST_NUMBER = 'packing_list_number'
    SHIP_DATE = 'ship_date'
    CUSTOMER_PART_NUMBER = 'customer_part_number'
    SUPPLIER_PART_NUMBER = 'supplier_part_number'
    PURCHASE_ORDER_LINE = 'purchase_order_line'
    QUANTITY = 'quantity'
    DATE_CODE = 'date_code'
    LOT_CODE = 'lot_code'
    COUNTRY_OF_ORIGIN = 'country_of_origin'
    MANUFACTURER = 'manufacturer'
    MANUFACTURER_PART_NUMBER = 'manufacturer_part_number'

    def __init__(self):
        """Register mixin."""
        super().__init__()
        self.add_mixin('supplier-barcode', True, __class__)

    def get_field_value(self, key, backup_value=None):
        """Return the value of a barcode field (extracted by a prior scan)."""
        fields = getattr(self, 'barcode_fields', None) or {}
        return fields.get(key, backup_value)

    @property
    def quantity(self):
        """Return the quantity from the barcode fields."""
        return self.get_field_value(self.QUANTITY)

    @property
    def supplier_part_number(self):
        """Return the supplier part number from the barcode fields."""
        return self.get_field_value(self.SUPPLIER_PART_NUMBER)

    @property
    def manufacturer_part_number(self):
        """Return the manufacturer part number from the barcode fields."""
        return self.get_field_value(self.MANUFACTURER_PART_NUMBER)

    @property
    def customer_order_number(self):
        """Return the customer order number from the barcode fields."""
        return self.get_field_value(self.CUSTOMER_ORDER_NUMBER)

    @property
    def supplier_order_number(self):
        """Return the supplier order number from the barcode fields."""
        return self.get_field_value(self.SUPPLIER_ORDER_NUMBER)

    def extract_barcode_fields(self, barcode_data) -> dict[str, str]:
        """Method to extract barcode fields from barcode data.

        This method should return a dict object where the keys are the field names,
        as per the "standard field names" (defined in the SuppliedBarcodeMixin class).

        This method *must* be implemented by each plugin

        Returns:
            A dict object containing the barcode fields.
        """
        raise NotImplementedError(
            'extract_barcode_fields must be implemented by each plugin'
        )

    def scan(self, barcode_data):
        """Try to match a supplier barcode to a supplier part."""
        barcode_data = str(barcode_data).strip()

        self.barcode_fields = self.extract_barcode_fields(barcode_data)

        if self.supplier_part_number is None and self.manufacturer_part_number is None:
            return None

        supplier_parts = self.get_supplier_parts(
            sku=self.supplier_part_number,
            mpn=self.manufacturer_part_number,
            supplier=self.get_supplier(),
        )

        if len(supplier_parts) > 1:
            return {'error': _('Found multiple matching supplier parts for barcode')}
        elif not supplier_parts:
            return None

        supplier_part = supplier_parts[0]

        data = {
            'pk': supplier_part.pk,
            'api_url': f'{SupplierPart.get_api_url()}{supplier_part.pk}/',
            'web_url': supplier_part.get_absolute_url(),
        }

        return {SupplierPart.barcode_model_type(): data}

    def scan_receive_item(self, barcode_data, user, purchase_order=None, location=None):
        """Try to scan a supplier barcode to receive a purchase order item."""
        barcode_data = str(barcode_data).strip()

        self.barcode_fields = self.extract_barcode_fields(barcode_data)

        if self.supplier_part_number is None and self.manufacturer_part_number is None:
            return None

        supplier = self.get_supplier()

        supplier_parts = self.get_supplier_parts(
            sku=self.supplier_part_number,
            mpn=self.manufacturer_part_number,
            supplier=supplier,
        )

        if len(supplier_parts) > 1:
            return {'error': _('Found multiple matching supplier parts for barcode')}
        elif not supplier_parts:
            return None

        supplier_part = supplier_parts[0]

        # If a purchase order is not provided, extract it from the provided data
        if not purchase_order:
            matching_orders = self.get_purchase_orders(
                self.customer_order_number,
                self.supplier_order_number,
                supplier=supplier,
            )

            order = self.customer_order_number or self.supplier_order_number

            if len(matching_orders) > 1:
                return {
                    'error': _(f"Found multiple purchase orders matching '{order}'")
                }

            if len(matching_orders) == 0:
                return {'error': _(f"No matching purchase order for '{order}'")}

            purchase_order = matching_orders.first()

        if supplier and purchase_order:
            if purchase_order.supplier != supplier:
                return {'error': _('Purchase order does not match supplier')}

        return self.receive_purchase_order_item(
            supplier_part,
            user,
            quantity=self.quantity,
            purchase_order=purchase_order,
            location=location,
            barcode=barcode_data,
        )

    def get_supplier(self) -> Company | None:
        """Get the supplier for the SUPPLIER_ID set in the plugin settings.

        If it's not defined, try to guess it and set it if possible.
        """
        if not isinstance(self, SettingsMixin):
            return None

        if supplier_pk := self.get_setting('SUPPLIER_ID'):
            try:
                return Company.objects.get(pk=supplier_pk)
            except Company.DoesNotExist:
                logger.error(
                    'No company with pk %d (set "SUPPLIER_ID" setting to a valid value)',
                    supplier_pk,
                )
                return None

        if not (supplier_name := getattr(self, 'DEFAULT_SUPPLIER_NAME', None)):
            return None

        suppliers = Company.objects.filter(
            name__icontains=supplier_name, is_supplier=True
        )

        # Only auto-assign the setting if the match is unambiguous
        if len(suppliers) != 1:
            return None

        self.set_setting('SUPPLIER_ID', suppliers.first().pk)

        return suppliers.first()

    @classmethod
    def ecia_field_map(cls):
        """Return a dict mapping ECIA field names to internal field names.

        Ref: https://www.ecianow.org/assets/docs/ECIA_Specifications.pdf

        Note that a particular plugin may need to reimplement this method,
        if it does not use the standard field names.
        """
        return {
            'K': cls.CUSTOMER_ORDER_NUMBER,
            '1K': cls.SUPPLIER_ORDER_NUMBER,
            '11K': cls.PACKING_LIST_NUMBER,
            '6D': cls.SHIP_DATE,
            '9D': cls.DATE_CODE,
            '10D': cls.DATE_CODE,
            '4K': cls.PURCHASE_ORDER_LINE,
            '14K': cls.PURCHASE_ORDER_LINE,
            'P': cls.SUPPLIER_PART_NUMBER,
            '1P': cls.MANUFACTURER_PART_NUMBER,
            '30P': cls.SUPPLIER_PART_NUMBER,
            '1T': cls.LOT_CODE,
            '4L': cls.COUNTRY_OF_ORIGIN,
            '1V': cls.MANUFACTURER,
            'Q': cls.QUANTITY,
        }

    @classmethod
    def parse_ecia_barcode2d(cls, barcode_data: str) -> dict[str, str]:
        """Parse a standard ECIA 2D barcode.

        Ref: https://www.ecianow.org/assets/docs/ECIA_Specifications.pdf

        Arguments:
            barcode_data: The raw barcode data

        Returns:
            A dict containing the parsed barcode fields
        """
        # Split data into separate fields
        fields = cls.parse_isoiec_15434_barcode2d(barcode_data)

        barcode_fields = {}

        if not fields:
            return barcode_fields

        for field in fields:
            # Match the longest-prefix data identifier for each field
            for identifier, field_name in cls.ecia_field_map().items():
                if field.startswith(identifier):
                    barcode_fields[field_name] = field[len(identifier) :]
                    break

        return barcode_fields

    @staticmethod
    def split_fields(
        barcode_data: str, delimiter: str = ',', header: str = '', trailer: str = ''
    ) -> list[str]:
        """Generic method for splitting barcode data into separate fields."""
        if header and barcode_data.startswith(header):
            barcode_data = barcode_data[len(header) :]

        if trailer and barcode_data.endswith(trailer):
            barcode_data = barcode_data[: -len(trailer)]

        return barcode_data.split(delimiter)

    @staticmethod
    def parse_isoiec_15434_barcode2d(barcode_data: str) -> list[str]:
        """Parse a ISO/IEC 15434 barcode, returning the split data section."""
        OLD_MOUSER_HEADER = '>[)>06\x1d'
        HEADER = '[)>\x1e06\x1d'
        TRAILER = '\x1e\x04'
        DELIMITER = '\x1d'

        # Some old mouser barcodes start with this messed up header
        if barcode_data.startswith(OLD_MOUSER_HEADER):
            barcode_data = barcode_data.replace(OLD_MOUSER_HEADER, HEADER, 1)

        # Check that the barcode starts with the necessary header
        if not barcode_data.startswith(HEADER):
            return

        return SupplierBarcodeMixin.split_fields(
            barcode_data, delimiter=DELIMITER, header=HEADER, trailer=TRAILER
        )

    @staticmethod
    def get_purchase_orders(
        customer_order_number, supplier_order_number, supplier: Company = None
    ):
        """Attempt to find a purchase order from the extracted customer and supplier order numbers."""
        orders = PurchaseOrder.objects.filter(status=PurchaseOrderStatus.PLACED.value)

        if supplier:
            orders = orders.filter(supplier=supplier)

        # this works because reference and supplier_reference are not nullable, so if
        # customer_order_number or supplier_order_number is None, the query won't return anything
        reference_filter = Q(reference__iexact=customer_order_number)
        supplier_reference_filter = Q(supplier_reference__iexact=supplier_order_number)

        orders_union = orders.filter(reference_filter | supplier_reference_filter)
        if orders_union.count() == 1:
            return orders_union
        else:
            # Ambiguous union - fall back to orders matching *both* references
            orders_intersection = orders.filter(
                reference_filter & supplier_reference_filter
            )
            return orders_intersection if orders_intersection else orders_union

    @staticmethod
    def get_supplier_parts(sku: str = None, supplier: Company = None, mpn: str = None):
        """Get a supplier part from SKU or by supplier and MPN.

        Filters are applied progressively (SKU, then supplier, then MPN),
        returning early as soon as a single unambiguous match is found.
        """
        if not (sku or supplier or mpn):
            return SupplierPart.objects.none()

        supplier_parts = SupplierPart.objects.all()

        if sku:
            supplier_parts = supplier_parts.filter(SKU__iexact=sku)
            if len(supplier_parts) == 1:
                return supplier_parts

        if supplier:
            supplier_parts = supplier_parts.filter(supplier=supplier.pk)
            if len(supplier_parts) == 1:
                return supplier_parts

        if mpn:
            supplier_parts = supplier_parts.filter(manufacturer_part__MPN__iexact=mpn)
            if len(supplier_parts) == 1:
                return supplier_parts

        logger.warning(
            "Found %d supplier parts for SKU '%s', supplier '%s', MPN '%s'",
            supplier_parts.count(),
            sku,
            supplier.name if supplier else None,
            mpn,
        )

        return supplier_parts

    @staticmethod
    def receive_purchase_order_item(
        supplier_part: SupplierPart,
        user: User,
        quantity: Decimal | str = None,
        purchase_order: PurchaseOrder = None,
        location: StockLocation = None,
        barcode: str = None,
    ) -> dict:
        """Try to receive a purchase order item.

        Returns:
            A dict object containing:
                - on success: a "success" message
                - on partial success: the "lineitem" with quantity and location (both can be None)
                - on failure: an "error" message
        """
        if quantity:
            try:
                quantity = Decimal(quantity)
            except InvalidOperation:
                logger.warning("Failed to parse quantity '%s'", quantity)
                quantity = None

        # find incomplete line_items that match the supplier_part
        line_items = purchase_order.lines.filter(
            part=supplier_part.pk, quantity__gt=F('received')
        )

        if len(line_items) == 1 or not quantity:
            # Bug fix: use .first() rather than [0] - indexing raised an
            # uncaught IndexError when no pending line items matched.
            # .first() returns None, which is handled by the error path below.
            line_item = line_items.first()
        else:
            # if there are multiple line items and the barcode contains a quantity:
            # 1. return the first line_item where line_item.quantity == quantity
            # 2. return the first line_item where line_item.quantity > quantity
            # 3. return the first line_item
            for line_item in line_items:
                if line_item.quantity == quantity:
                    break
            else:
                for line_item in line_items:
                    if line_item.quantity > quantity:
                        break
                else:
                    line_item = line_items.first()

        if not line_item:
            return {'error': _('Failed to find pending line item for supplier part')}

        no_stock_locations = False

        if not location:
            # try to guess the destination were the stock_part should go
            # 1. check if it's defined on the line_item
            # 2. check if it's defined on the part
            # 3. check if there's 1 or 0 stock locations defined in InvenTree
            #    -> assume all stock is going into that location (or no location)
            if location := line_item.destination:
                pass
            elif location := supplier_part.part.get_default_location():
                pass
            elif StockLocation.objects.count() <= 1:
                if not (location := StockLocation.objects.first()):
                    no_stock_locations = True

        response = {
            'lineitem': {'pk': line_item.pk, 'purchase_order': purchase_order.pk}
        }

        if quantity:
            response['lineitem']['quantity'] = quantity
        if location:
            response['lineitem']['location'] = location.pk

        # if either the quantity is missing or no location is defined/found
        # -> return the line_item found, so the client can gather the missing
        #    information and complete the action with an 'api-po-receive' call
        if not quantity or (not location and not no_stock_locations):
            response['action_required'] = _(
                'Further information required to receive line item'
            )
            return response

        purchase_order.receive_line_item(
            line_item, location, quantity, user, barcode=barcode
        )

        response['success'] = _('Received purchase order line item')
        return response

View File

@ -1,186 +0,0 @@
"""Functions for triggering and responding to server side events."""
import logging
from django.conf import settings
from django.db import transaction
from django.db.models.signals import post_delete, post_save
from django.dispatch.dispatcher import receiver
import InvenTree.exceptions
from InvenTree.ready import canAppAccessDatabase, isImportingData
from InvenTree.tasks import offload_task
from plugin.registry import registry
logger = logging.getLogger('inventree')
def trigger_event(event, *args, **kwargs):
    """Trigger an event with optional arguments.

    This event will be stored in the database,
    and the worker will respond to it later on.
    """
    from common.models import InvenTreeSetting

    # Plugin support disabled entirely - nothing to do
    if not settings.PLUGINS_ENABLED:
        return  # pragma: no cover

    # Plugin *events* are disabled - nothing to do
    if not InvenTreeSetting.get_setting('ENABLE_PLUGINS_EVENTS', False):
        return

    # Make sure the database can be accessed and is not being tested rn
    if (
        not canAppAccessDatabase(allow_shell=True)
        and not settings.PLUGIN_TESTING_EVENTS
    ):
        logger.debug("Ignoring triggered event '%s' - database not ready", event)
        return

    logger.debug("Event triggered: '%s'", event)

    # By default, force the event to be processed asynchronously
    if not settings.PLUGIN_TESTING_EVENTS:
        kwargs.setdefault('force_async', True)

    offload_task(register_event, event, *args, **kwargs)
def register_event(event, *args, **kwargs):
    """Register the event with any interested plugins.

    Note: This function is processed by the background worker,
    as it performs multiple database access operations.
    """
    from common.models import InvenTreeSetting

    logger.debug("Registering triggered event: '%s'", event)

    # Determine if there are any plugins which are interested in responding
    if not settings.PLUGIN_TESTING and not InvenTreeSetting.get_setting(
        'ENABLE_PLUGINS_EVENTS'
    ):
        return

    with transaction.atomic():
        for slug, plugin in registry.plugins.items():
            # Only 'active' plugins which implement the events mixin may respond
            if not plugin.mixin_enabled('events') or not plugin.is_active():
                continue

            # Let the plugin decide if it wants to process this event
            if not plugin.wants_process_event(event):
                continue

            logger.debug("Registering callback for plugin '%s'", slug)

            # This task *must* be processed by the background worker,
            # unless we are running CI tests
            if not settings.PLUGIN_TESTING_EVENTS:
                kwargs.setdefault('force_async', True)

            # Offload a separate task for each plugin
            offload_task(process_event, slug, event, *args, **kwargs)
def process_event(plugin_slug, event, *args, **kwargs):
    """Respond to a triggered event.

    This function is run by the background worker process.
    This function may queue multiple functions to be handled by the background worker.
    """
    plugin = registry.get_plugin(plugin_slug)

    if plugin is None:  # pragma: no cover
        logger.error("Could not find matching plugin for '%s'", plugin_slug)
        return

    logger.debug("Plugin '%s' is processing triggered event '%s'", plugin_slug, event)

    try:
        plugin.process_event(event, *args, **kwargs)
    except Exception:
        # Log the exception to the database
        InvenTree.exceptions.log_error(f'plugins.{plugin_slug}.process_event')

        # Bug fix: previously this raised a *new* bare 'Exception', which
        # discarded the original exception type, message and traceback.
        # Re-raise the original exception so the background worker can retry
        # (and report) with full context.
        raise
def allow_table_event(table_name):
    """Determine if an automatic event should be fired for a given table.

    We *do not* want events to be fired for some tables!
    """
    # Prevent table events during the data import process
    if isImportingData():
        return False  # pragma: no cover

    # Prevent table events when in testing mode (saves a lot of time)
    if settings.TESTING and not settings.TESTING_TABLE_EVENTS:
        return False

    table_name = table_name.lower().strip()

    # Ignore any tables which start with these prefixes
    # (internal / third-party bookkeeping tables)
    ignore_prefixes = (
        'account_',
        'auth_',
        'authtoken_',
        'django_',
        'error_',
        'exchange_',
        'otp_',
        'plugin_',
        'socialaccount_',
        'user_',
        'users_',
    )

    if table_name.startswith(ignore_prefixes):
        return False

    # Specific high-churn tables which must never fire events
    ignore_tables = {
        'common_notificationentry',
        'common_notificationmessage',
        'common_webhookendpoint',
        'common_webhookmessage',
        'part_partpricing',
        'part_partstocktake',
        'part_partstocktakereport',
    }

    return table_name not in ignore_tables
@receiver(post_save)
def after_save(sender, instance, created, **kwargs):
    """Trigger an event whenever a database entry is saved."""
    table = sender.objects.model._meta.db_table

    # Ignore instances without a primary 'id' attribute
    if getattr(instance, 'id', None) is None:
        return

    if not allow_table_event(table):
        return

    # Distinguish between a freshly created entry and an update
    action = 'created' if created else 'saved'

    trigger_event(f'{table}.{action}', id=instance.id, model=sender.__name__)
@receiver(post_delete)
def after_delete(sender, instance, **kwargs):
    """Trigger an event whenever a database entry is deleted."""
    table = sender.objects.model._meta.db_table

    # Note: no 'id' is passed here - the instance no longer exists
    if allow_table_event(table):
        trigger_event(f'{table}.deleted', model=sender.__name__)

View File

@ -1,217 +0,0 @@
"""Plugin mixin class for ScheduleMixin."""
import logging
from django.conf import settings
from django.db.utils import OperationalError, ProgrammingError
from plugin.helpers import MixinImplementationError
logger = logging.getLogger('inventree')
class ScheduleMixin:
"""Mixin that provides support for scheduled tasks.
Implementing classes must provide a dict object called SCHEDULED_TASKS,
which provides information on the tasks to be scheduled.
SCHEDULED_TASKS = {
# Name of the task (will be prepended with the plugin name)
'test_server': {
'func': 'myplugin.tasks.test_server', # Python function to call (no arguments!)
'schedule': "I", # Schedule type (see django_q.Schedule)
'minutes': 30, # Number of minutes (only if schedule type = Minutes)
'repeats': 5, # Number of repeats (leave blank for 'forever')
},
'member_func': {
'func': 'my_class_func', # Note, without the 'dot' notation, it will call a class member function
'schedule': "H", # Once per hour
},
}
Note: 'schedule' parameter must be one of ['I', 'H', 'D', 'W', 'M', 'Q', 'Y']
Note: The 'func' argument can take two different forms:
- Dotted notation e.g. 'module.submodule.func' - calls a global function with the defined path
- Member notation e.g. 'my_func' (no dots!) - calls a member function of the calling class
"""
# Valid django_q schedule type codes (see django_q.Schedule):
# I(nterval/minutes), H(ourly), D(aily), W(eekly), M(onthly), Q(uarterly), Y(early)
ALLOWABLE_SCHEDULE_TYPES = ['I', 'H', 'D', 'W', 'M', 'Q', 'Y']

# Override this in subclass model
SCHEDULED_TASKS = {}

class MixinMeta:
    """Meta options for this mixin."""

    MIXIN_NAME = 'Schedule'
def __init__(self):
    """Register mixin."""
    super().__init__()
    # Names of tasks registered by this plugin instance
    self.scheduled_tasks = []
    self.add_mixin('schedule', 'has_scheduled_tasks', __class__)
@classmethod
def _activate_mixin(cls, registry, plugins, *args, **kwargs):
    """Activate schedules from plugins with the ScheduleMixin.

    Registers tasks for all active plugins which implement the schedule
    mixin, then removes any stale 'plugin.*' schedule entries which no
    longer correspond to a registered task.
    """
    logger.debug('Activating plugin tasks')

    from common.models import InvenTreeSetting

    # List of tasks we have activated
    task_keys = []

    if settings.PLUGIN_TESTING or InvenTreeSetting.get_setting(
        'ENABLE_PLUGINS_SCHEDULE'
    ):
        for _key, plugin in plugins:
            if plugin.mixin_enabled('schedule'):
                if plugin.is_active():
                    # Only active tasks for plugins which are enabled
                    plugin.register_tasks()
                    task_keys += plugin.get_task_names()

        if len(task_keys) > 0:
            logger.info('Activated %s scheduled tasks', len(task_keys))

    # Remove any scheduled tasks which do not match
    # This stops 'old' plugin tasks from accumulating
    try:
        from django_q.models import Schedule

        # All plugin-registered schedules are namespaced with a 'plugin.' prefix
        scheduled_plugin_tasks = Schedule.objects.filter(
            name__istartswith='plugin.'
        )

        deleted_count = 0

        for task in scheduled_plugin_tasks:
            if task.name not in task_keys:
                task.delete()
                deleted_count += 1

        if deleted_count > 0:
            logger.info(
                'Removed %s old scheduled tasks', deleted_count
            )  # pragma: no cover
    except (ProgrammingError, OperationalError):
        # Database might not yet be ready
        logger.warning('activate_integration_schedule failed, database not ready')
def get_scheduled_tasks(self):
"""Returns `SCHEDULED_TASKS` context.
Override if you want the scheduled tasks to be dynamic (influenced by settings for example).
"""
return getattr(self, 'SCHEDULED_TASKS', {})
@property
def has_scheduled_tasks(self):
"""Are tasks defined for this plugin."""
return bool(self.get_scheduled_tasks())
def validate_scheduled_tasks(self):
"""Check that the provided scheduled tasks are valid."""
if not self.has_scheduled_tasks:
raise MixinImplementationError('SCHEDULED_TASKS not defined')
for key, task in self.scheduled_tasks.items():
if 'func' not in task:
raise MixinImplementationError(
f"Task '{key}' is missing 'func' parameter"
)
if 'schedule' not in task:
raise MixinImplementationError(
f"Task '{key}' is missing 'schedule' parameter"
)
schedule = task['schedule'].upper().strip()
if schedule not in self.ALLOWABLE_SCHEDULE_TYPES:
raise MixinImplementationError(
f"Task '{key}': Schedule '{schedule}' is not a valid option"
)
# If 'minutes' is selected, it must be provided!
if schedule == 'I' and 'minutes' not in task:
raise MixinImplementationError(
f"Task '{key}' is missing 'minutes' parameter"
)
def get_task_name(self, key):
"""Task name for key."""
# Generate a 'unique' task name
slug = self.plugin_slug()
return f'plugin.{slug}.{key}'
def get_task_names(self):
"""All defined task names."""
# Returns a list of all task names associated with this plugin instance
return [self.get_task_name(key) for key in self.scheduled_tasks.keys()]
def register_tasks(self):
"""Register the tasks with the database."""
self.scheduled_tasks = self.get_scheduled_tasks()
self.validate_scheduled_tasks()
try:
from django_q.models import Schedule
for key, task in self.scheduled_tasks.items():
task_name = self.get_task_name(key)
obj = {
'name': task_name,
'schedule_type': task['schedule'],
'minutes': task.get('minutes', None),
'repeats': task.get('repeats', -1),
}
func_name = task['func'].strip()
if '.' in func_name:
"""Dotted notation indicates that we wish to run a globally defined function, from a specified Python module."""
obj['func'] = func_name
else:
"""Non-dotted notation indicates that we wish to call a 'member function' of the calling plugin. This is managed by the plugin registry itself."""
slug = self.plugin_slug()
obj['func'] = 'plugin.registry.call_plugin_function'
obj['args'] = f"'{slug}', '{func_name}'"
if Schedule.objects.filter(name=task_name).exists():
# Scheduled task already exists - update it!
logger.info("Updating scheduled task '%s'", task_name)
instance = Schedule.objects.get(name=task_name)
for item in obj:
setattr(instance, item, obj[item])
instance.save()
else:
logger.info("Adding scheduled task '%s'", task_name)
# Create a new scheduled task
Schedule.objects.create(**obj)
except (ProgrammingError, OperationalError): # pragma: no cover
# Database might not yet be ready
logger.warning('register_tasks failed, database not ready')
def unregister_tasks(self):
"""Deregister the tasks with the database."""
try:
from django_q.models import Schedule
for key, _ in self.scheduled_tasks.items():
task_name = self.get_task_name(key)
try:
scheduled_task = Schedule.objects.get(name=task_name)
scheduled_task.delete()
except Schedule.DoesNotExist:
pass
except (ProgrammingError, OperationalError): # pragma: no cover
# Database might not yet be ready
logger.warning('unregister_tasks failed, database not ready')

View File

@ -1,229 +0,0 @@
"""Plugin mixin classes for label plugins."""
from typing import Union
from django.core.exceptions import ValidationError
from django.db.models.query import QuerySet
from django.http import JsonResponse
from django.utils.translation import gettext_lazy as _
import pdf2image
from rest_framework import serializers
from rest_framework.request import Request
from build.models import BuildLine
from common.models import InvenTreeSetting
from InvenTree.exceptions import log_error
from InvenTree.tasks import offload_task
from label.models import LabelTemplate
from part.models import Part
from plugin.base.label import label as plugin_label
from plugin.helpers import MixinNotImplementedError
from stock.models import StockItem, StockLocation
LabelItemType = Union[StockItem, StockLocation, Part, BuildLine]
class LabelPrintingMixin:
    """Mixin which enables direct printing of stock labels.

    Each plugin must provide a NAME attribute, which is used to uniquely identify the printer.

    The plugin *must* also implement the print_label() function for rendering an individual label

    Note that the print_labels() function can also be overridden to provide custom behavior.
    """

    # If True, the print_label() method will block until the label is printed
    # If False, the offload_label() method will be called instead
    # By default, this is False, which means that labels will be printed in the background
    BLOCKING_PRINT = False

    class MixinMeta:
        """Meta options for this mixin."""

        MIXIN_NAME = 'Label printing'

    def __init__(self):  # pragma: no cover
        """Register mixin."""
        super().__init__()
        self.add_mixin('labels', True, __class__)

    def render_to_pdf(self, label: LabelTemplate, request, **kwargs):
        """Render this label to PDF format.

        Arguments:
            label: The LabelTemplate object to render
            request: The HTTP request object which triggered this print job

        Raises:
            ValidationError: If the template fails to render.
        """
        try:
            return label.render(request)
        except Exception as e:
            # NOTE(review): caught 'e' is unused - consider 'raise ... from e'
            # to preserve the exception chain
            log_error('label.render_to_pdf')
            raise ValidationError(_('Error rendering label to PDF'))

    def render_to_html(self, label: LabelTemplate, request, **kwargs):
        """Render this label to HTML format.

        Arguments:
            label: The LabelTemplate object to render
            request: The HTTP request object which triggered this print job

        Raises:
            ValidationError: If the template fails to render.
        """
        try:
            return label.render_as_string(request)
        except Exception as e:
            # NOTE(review): caught 'e' is unused - consider 'raise ... from e'
            log_error('label.render_to_html')
            raise ValidationError(_('Error rendering label to HTML'))

    def render_to_png(self, label: LabelTemplate, request=None, **kwargs):
        """Render this label to PNG format.

        Arguments:
            label: The LabelTemplate object to render
            request: The HTTP request object which triggered this print job

        Keyword Arguments:
            pdf_data: The raw PDF data of the rendered label (if already rendered)
            dpi: The DPI to use for the PNG rendering
            use_cairo (bool): Whether to use the pdftocairo backend for rendering which provides better results in tests,
                see [#6488](https://github.com/inventree/InvenTree/pull/6488) for details. If False, pdftoppm is used (default: True)
            pdf2image_kwargs (dict): Additional keyword arguments to pass to the
                [`pdf2image.convert_from_bytes`](https://pdf2image.readthedocs.io/en/latest/reference.html#pdf2image.pdf2image.convert_from_bytes) method (optional)

        Raises:
            ValidationError: If the PDF -> PNG conversion fails.
        """
        # Check if pdf data is provided
        pdf_data = kwargs.get('pdf_data', None)

        # Render the PDF first if the caller did not supply pre-rendered data
        if not pdf_data:
            pdf_data = (
                self.render_to_pdf(label, request, **kwargs).get_document().write_pdf()
            )

        # Explicit kwargs take precedence over the global LABEL_DPI setting
        pdf2image_kwargs = {
            'dpi': kwargs.get('dpi', InvenTreeSetting.get_setting('LABEL_DPI', 300)),
            'use_pdftocairo': kwargs.get('use_cairo', True),
            **kwargs.get('pdf2image_kwargs', {}),
        }

        # Convert to png data
        try:
            return pdf2image.convert_from_bytes(pdf_data, **pdf2image_kwargs)[0]
        except Exception as e:
            # NOTE(review): caught 'e' is unused - consider 'raise ... from e'
            log_error('label.render_to_png')
            raise ValidationError(_('Error rendering label to PNG'))

    def print_labels(
        self,
        label: LabelTemplate,
        items: QuerySet[LabelItemType],
        request: Request,
        **kwargs,
    ):
        """Print one or more labels with the provided template and items.

        Arguments:
            label: The LabelTemplate object to use for printing
            items: The list of database items to print (e.g. StockItem instances)
            request: The HTTP request object which triggered this print job

        Keyword Arguments:
            printing_options: The printing options set for this print job defined in the PrintingOptionsSerializer

        Returns:
            A JSONResponse object which indicates outcome to the user

        The default implementation simply calls print_label() for each label, producing multiple single label output "jobs"
        but this can be overridden by the particular plugin.
        """
        # The request may not carry an authenticated user (e.g. API token / background call)
        try:
            user = request.user
        except AttributeError:
            user = None

        # Generate a label output for each provided item
        for item in items:
            label.object_to_print = item
            filename = label.generate_filename(request)
            pdf_file = self.render_to_pdf(label, request, **kwargs)
            pdf_data = pdf_file.get_document().write_pdf()
            # Reuse the already-rendered PDF data to avoid rendering twice
            png_file = self.render_to_png(label, request, pdf_data=pdf_data, **kwargs)

            print_args = {
                'pdf_file': pdf_file,
                'pdf_data': pdf_data,
                'png_file': png_file,
                'filename': filename,
                'label_instance': label,
                'item_instance': item,
                'user': user,
                'width': label.width,
                'height': label.height,
                'printing_options': kwargs['printing_options'],
            }

            if self.BLOCKING_PRINT:
                # Blocking print job
                self.print_label(**print_args)
            else:
                # Non-blocking print job
                # Offload the print job to a background worker
                self.offload_label(**print_args)

        # Return a JSON response to the user
        return JsonResponse({
            'success': True,
            'message': f'{len(items)} labels printed',
        })

    def print_label(self, **kwargs):
        """Print a single label (blocking).

        kwargs:
            pdf_file: The PDF file object of the rendered label (WeasyTemplateResponse object)
            pdf_data: Raw PDF data of the rendered label
            filename: The filename of this PDF label
            label_instance: The instance of the label model which triggered the print_label() method
            item_instance: The instance of the database model against which the label is printed
            user: The user who triggered this print job
            width: The expected width of the label (in mm)
            height: The expected height of the label (in mm)
            printing_options: The printing options set for this print job defined in the PrintingOptionsSerializer

        Note that the supplied kwargs may be different if the plugin overrides the print_labels() method.
        """
        # Unimplemented (to be implemented by the particular plugin class)
        raise MixinNotImplementedError(
            'This Plugin must implement a `print_label` method'
        )

    def offload_label(self, **kwargs):
        """Offload a single label (non-blocking).

        Instead of immediately printing the label (which is a blocking process),
        this method should offload the label to a background worker process.

        Offloads a call to the 'print_label' method (of this plugin) to a background worker.
        """
        # Exclude the 'pdf_file' object - cannot be pickled
        kwargs.pop('pdf_file', None)

        offload_task(plugin_label.print_label, self.plugin_slug(), **kwargs)

    def get_printing_options_serializer(
        self, request: Request, *args, **kwargs
    ) -> Union[serializers.Serializer, None]:
        """Return a serializer class instance with dynamic printing options.

        Arguments:
            request: The request made to print a label or interfering the available serializer fields via an OPTIONS request
            *args, **kwargs: need to be passed to the serializer instance

        Returns:
            A class instance of a DRF serializer class, by default this an instance of
            self.PrintingOptionsSerializer using the *args, **kwargs if existing for this plugin
        """
        serializer = getattr(self, 'PrintingOptionsSerializer', None)

        if not serializer:
            return None

        return serializer(*args, **kwargs)

View File

@ -1,71 +0,0 @@
"""Plugin mixin for locating stock items and locations."""
import logging
from plugin.helpers import MixinNotImplementedError
logger = logging.getLogger('inventree')
class LocateMixin:
    """Mixin class which provides support for 'locating' inventory items, for example identifying the location of a particular StockLocation.

    Plugins could implement audible or visual cues to direct attention to the location,
    with (for e.g.) LED strips or buzzers, or some other method.

    The plugins may also be used to *deliver* a particular stock item to the user.

    A class which implements this mixin may implement the following methods:

    - locate_stock_item : Used to locate / identify a particular stock item
    - locate_stock_location : Used to locate / identify a particular stock location

    Refer to the default method implementations below for more information!
    """

    class MixinMeta:
        """Meta for mixin."""

        MIXIN_NAME = 'Locate'

    def __init__(self):
        """Register the mixin."""
        super().__init__()
        self.add_mixin('locate', True, __class__)

    def locate_stock_item(self, item_pk):
        """Attempt to locate a particular StockItem.

        Arguments:
            item_pk: The PK (primary key) of the StockItem to be located

        The default implementation for locating a StockItem
        attempts to locate the StockLocation where the item is located.

        An attempt is only made if the StockItem is *in stock*

        Note: A custom implementation could always change this behaviour
        """
        logger.info('LocateMixin: Attempting to locate StockItem pk=%s', item_pk)

        from stock.models import StockItem

        try:
            item = StockItem.objects.get(pk=item_pk)

            if item.in_stock and item.location is not None:
                self.locate_stock_location(item.location.pk)

        except StockItem.DoesNotExist:  # pragma: no cover
            # Bug fix: previous message was a plain string containing a literal
            # '{item_pk}' placeholder (missing f-prefix) - use lazy logger args instead
            logger.warning('LocateMixin: StockItem pk=%s not found', item_pk)

    def locate_stock_location(self, location_pk):
        """Attempt to location a particular StockLocation.

        Arguments:
            location_pk: The PK (primary key) of the StockLocation to be located

        Note: The default implementation here does nothing!

        Raises:
            MixinNotImplementedError: Always, unless overridden by the plugin.
        """
        raise MixinNotImplementedError

View File

@ -1,49 +0,0 @@
"""Builtin plugin for requesting exchange rates from an external API."""
import logging
from django.utils.translation import gettext_lazy as _
from plugin import InvenTreePlugin
from plugin.mixins import APICallMixin, CurrencyExchangeMixin
logger = logging.getLogger('inventree')
class InvenTreeCurrencyExchange(APICallMixin, CurrencyExchangeMixin, InvenTreePlugin):
    """Default InvenTree plugin for currency exchange rates.

    Fetches exchange rate information from frankfurter.app
    """

    NAME = 'InvenTreeCurrencyExchange'
    SLUG = 'inventreecurrencyexchange'
    AUTHOR = _('InvenTree contributors')
    TITLE = _('InvenTree Currency Exchange')
    DESCRIPTION = _('Default currency exchange integration')
    VERSION = '1.0.0'

    def update_exchange_rates(self, base_currency: str, symbols: list[str]) -> dict:
        """Fetch current exchange rates from the external API.

        Arguments:
            base_currency: Currency code to use as the conversion base
            symbols: List of target currency codes to fetch rates for

        Returns:
            Mapping of currency code to exchange rate, or None if the
            remote server did not respond with HTTP 200.
        """
        response = self.api_call(
            'latest',
            url_args={'from': [base_currency], 'to': symbols},
            simple_response=False,
        )

        # Bail out early on any non-OK response from the remote API
        if response.status_code != 200:
            logger.warning(
                'Failed to update exchange rates from %s: Server returned status %s',
                self.api_url,
                response.status_code,
            )
            return None

        # The base currency always converts to itself at 1:1
        exchange_rates = response.json().get('rates', {})
        exchange_rates[base_currency] = 1.00

        return exchange_rates

    @property
    def api_url(self):
        """Return the API URL for this plugin."""
        return 'https://api.frankfurter.app'

View File

@ -1,73 +0,0 @@
"""The TMEPlugin is meant to integrate the TME API into Inventree.
This plugin can currently only match TME barcodes to supplier parts.
"""
import re
from django.utils.translation import gettext_lazy as _
from plugin import InvenTreePlugin
from plugin.mixins import SettingsMixin, SupplierBarcodeMixin
class TMEPlugin(SupplierBarcodeMixin, SettingsMixin, InvenTreePlugin):
    """Plugin to integrate the TME API into Inventree."""

    NAME = 'TMEPlugin'
    TITLE = _('Supplier Integration - TME')
    DESCRIPTION = _('Provides support for scanning TME barcodes')
    VERSION = '1.0.0'
    AUTHOR = _('InvenTree contributors')

    DEFAULT_SUPPLIER_NAME = 'TME'
    SETTINGS = {
        'SUPPLIER_ID': {
            'name': _('Supplier'),
            'description': _("The Supplier which acts as 'TME'"),
            'model': 'company.company',
        }
    }

    # Matches whitespace-separated 'key:value' pairs followed by trailing tokens
    # (the custom TME QR-code payload format)
    TME_IS_QRCODE_REGEX = re.compile(r'([^\s:]+:[^\s:]+\s+)+(\S+(\s|$)+)+')
    # Matches a run of whitespace-separated tokens (the 2D DataMatrix payload format)
    TME_IS_BARCODE2D_REGEX = re.compile(r'(([^\s]+)(\s+|$))+')

    # Custom field mapping
    TME_QRCODE_FIELDS = {
        'PN': SupplierBarcodeMixin.SUPPLIER_PART_NUMBER,
        'CPO': SupplierBarcodeMixin.CUSTOMER_ORDER_NUMBER,
        'PO': SupplierBarcodeMixin.SUPPLIER_ORDER_NUMBER,
        'MPN': SupplierBarcodeMixin.MANUFACTURER_PART_NUMBER,
        'QTY': SupplierBarcodeMixin.QUANTITY,
    }

    def extract_barcode_fields(self, barcode_data: str) -> dict[str, str]:
        """Get supplier_part and barcode_fields from TME QR-Code or DataMatrix-Code.

        Arguments:
            barcode_data: The raw scanned barcode payload

        Returns:
            Mapping of standard barcode field names to extracted string values;
            an empty dict if the payload matches neither supported format.
        """
        barcode_fields = {}

        if self.TME_IS_QRCODE_REGEX.fullmatch(barcode_data):
            # Custom QR Code format e.g. "QTY: 1 PN:12345"
            for item in barcode_data.split(' '):
                if ':' in item:
                    key, value = item.split(':')
                    if key in self.TME_QRCODE_FIELDS:
                        barcode_fields[self.TME_QRCODE_FIELDS[key]] = value
            return barcode_fields
        elif self.TME_IS_BARCODE2D_REGEX.fullmatch(barcode_data):
            # 2D Barcode format e.g. "PWBP-302 1PMPNWBP-302 Q1 K19361337/1"
            # Each token carries an ECIA data-identifier prefix mapped via ecia_field_map()
            for item in barcode_data.split(' '):
                for k, v in self.ecia_field_map().items():
                    if item.startswith(k):
                        barcode_fields[v] = item[len(k) :]
        else:
            return {}

        # Custom handling for order number
        # NOTE(review): looks like TME order numbers may carry a '/<line>' suffix
        # which is stripped here - confirm against TME barcode spec
        if SupplierBarcodeMixin.CUSTOMER_ORDER_NUMBER in barcode_fields:
            order_number = barcode_fields[SupplierBarcodeMixin.CUSTOMER_ORDER_NUMBER]
            order_number = order_number.split('/')[0]
            barcode_fields[SupplierBarcodeMixin.CUSTOMER_ORDER_NUMBER] = order_number

        return barcode_fields

View File

@ -1,255 +0,0 @@
"""Helpers for plugin app."""
import inspect
import logging
import os
import pathlib
import pkgutil
import sysconfig
import traceback
from importlib.metadata import entry_points
from django import template
from django.conf import settings
from django.core.exceptions import AppRegistryNotReady
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
logger = logging.getLogger('inventree')
# region logging / errors
class IntegrationPluginError(Exception):
    """Wraps an underlying error together with the path of the plugin which raised it."""

    def __init__(self, path, message):
        """Store the origin and description of the failure.

        Args:
            path: Path on which the error occurred - used to find out which plugin it was
            message: The original error message
        """
        self.message = message
        self.path = path

    def __str__(self):
        """The wrapped error message."""
        return self.message
class MixinImplementationError(ValueError):
    """Raised when a plugin implements a mixin incorrectly.

    Most commonly this indicates that a required constant is missing.
    """
class MixinNotImplementedError(NotImplementedError):
    """Raised when a required mixin hook was not overridden by the plugin."""
def log_error(error, reference: str = 'general'):
    """Record a plugin error on the registry's error stack.

    Arguments:
        error: The error object (or message) to record
        reference: Category under which the error is grouped (default: 'general')
    """
    from plugin import registry

    # Create the category bucket on demand, then push the error onto it
    registry.errors.setdefault(reference, []).append(error)
def handle_error(error, do_raise: bool = True, do_log: bool = True, log_name: str = ''):
    """Handles an error and casts it as an IntegrationPluginError.

    Arguments:
        error: The original exception
        do_raise: If True, re-raise the error wrapped as an IntegrationPluginError
        do_log: If True, record the error against the plugin registry
        log_name: Optional reference name under which to log the error
    """
    # The last traceback frame points at the file which actually raised the error
    package_path = traceback.extract_tb(error.__traceback__)[-1].filename
    install_path = sysconfig.get_paths()['purelib']

    try:
        # Installed package: first path component below site-packages is the package name
        package_name = pathlib.Path(package_path).relative_to(install_path).parts[0]
    except ValueError:
        # is file - loaded -> form a name for that
        try:
            # Derive a dotted module name relative to the InvenTree base directory
            path_obj = pathlib.Path(package_path).relative_to(settings.BASE_DIR)
            path_parts = [*path_obj.parts]
            path_parts[-1] = path_parts[-1].replace(
                path_obj.suffix, ''
            )  # remove suffix

            # remove path prefixes
            if path_parts[0] == 'plugin':
                path_parts.remove('plugin')
                path_parts.pop(0)
            else:
                path_parts.remove('plugins')  # pragma: no cover

            package_name = '.'.join(path_parts)
        except Exception:
            # Fall back to the raw file path if no name can be derived
            package_name = package_path

    if do_log:
        log_kwargs = {}
        if log_name:
            log_kwargs['reference'] = log_name
        log_error({package_name: str(error)}, **log_kwargs)

    if do_raise:
        # do a straight raise if we are playing with environment variables at execution time, ignore the broken sample
        if (
            settings.TESTING_ENV
            and package_name != 'integration.broken_sample'
            and isinstance(error, IntegrityError)
        ):
            raise error  # pragma: no cover

        raise IntegrationPluginError(package_name, str(error))
def get_entrypoints():
    """Returns list for entrypoints for InvenTree plugins.

    Plugins installed as packages advertise themselves via the
    'inventree_plugins' entry point group.
    """
    try:
        # Python >= 3.10: select entry points by group keyword.
        # The legacy dict interface was removed from EntryPoints in Python 3.12.
        return entry_points(group='inventree_plugins')
    except TypeError:  # pragma: no cover
        # Older interpreters: entry_points() returns a dict keyed by group name
        return entry_points().get('inventree_plugins', [])
# endregion
# region git-helpers
def get_git_log(path):
    """Get dict with info of the last commit to file named in path.

    Arguments:
        path: Path to a file (or directory) inside a git repository

    Returns:
        dict with keys 'hash', 'author', 'mail', 'date' and 'message';
        each value is an empty string if the information is unavailable.
    """
    import datetime

    from dulwich.repo import NotGitRepository, Repo

    from InvenTree.ready import isInTestMode

    output = None
    path = os.path.abspath(path)

    # A file path was provided - inspect its containing directory
    # (isfile() already implies the path exists)
    if os.path.isfile(path):
        path = os.path.dirname(path)

    # only do this if we are not in test mode
    if not isInTestMode():  # pragma: no cover
        try:
            repo = Repo(path)
            head = repo.head()
            commit = repo[head]

            # The author field is stored as 'Name <email>' - split into both parts
            output = [
                head.decode(),
                commit.author.decode().split('<')[0][:-1],
                commit.author.decode().split('<')[1][:-1],
                datetime.datetime.fromtimestamp(commit.author_time).isoformat(),
                commit.message.decode().split('\n')[0],
            ]
        except KeyError:
            logger.debug('No HEAD tag found in git repo at path %s', path)
        except NotGitRepository:
            pass

    if not output:
        output = 5 * ['']  # pragma: no cover

    return {
        'hash': output[0],
        'author': output[1],
        'mail': output[2],
        'date': output[3],
        'message': output[4],
    }
# endregion
# region plugin finders
def get_modules(pkg, path=None):
    """Get all modules in a package.

    Arguments:
        pkg: The package object to search for modules
        path: Optional search path (single path or list); defaults to the package's own path

    Returns:
        List of discovered module objects and their exported public attributes
    """
    import importlib.util

    context = {}

    if path is None:
        path = pkg.__path__
    elif type(path) is not list:
        path = [path]

    for finder, name, _ in pkgutil.walk_packages(path):
        try:
            # Load the module via importlib machinery.
            # (loader.find_module().load_module() was removed in Python 3.12)
            spec = finder.find_spec(name)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)

            pkg_names = getattr(module, '__all__', None)

            # Export the module's public attributes (respecting __all__ if defined)
            for k, v in vars(module).items():
                if not k.startswith('_') and (pkg_names is None or k in pkg_names):
                    context[k] = v

            context[name] = module
        except AppRegistryNotReady:  # pragma: no cover
            pass
        except Exception as error:
            # this 'protects' against malformed plugin modules by more or less silently failing
            # log to stack
            log_error({name: str(error)}, 'discovery')

    return list(context.values())
def get_classes(module):
    """Return a list of (name, class) tuples for every class in the given module."""
    members = inspect.getmembers(module, predicate=inspect.isclass)
    return members
def get_plugins(pkg, baseclass, path=None):
    """Return a list of all modules under a given package.

    - Modules must be a subclass of the provided 'baseclass'
    - Modules must have a non-empty NAME parameter
    """
    # Collect every candidate class from every module in the package,
    # keeping only subclasses of `baseclass` which define a truthy NAME
    return [
        candidate
        for module in get_modules(pkg, path=path)
        for _name, candidate in get_classes(module)
        if issubclass(candidate, baseclass) and candidate.NAME
    ]
# endregion
# region templates
def render_template(plugin, template_file, context=None):
    """Locate and render a template file, available in the global template context.

    Arguments:
        plugin: The plugin instance requesting the render (used for error reporting)
        template_file: Name of the template file, resolved via the django template loader
        context: Optional template context dict

    Returns:
        The rendered HTML string, or an inline error panel if the template does not exist.
    """
    try:
        tmp = template.loader.get_template(template_file)
    except template.TemplateDoesNotExist:
        logger.exception(
            "Plugin %s could not locate template '%s'", plugin.slug, template_file
        )

        return f"""
        <div class='alert alert-block alert-danger'>
        Template file <em>{template_file}</em> does not exist.
        </div>
        """

    # Render with the provided context
    html = tmp.render(context)

    return html
def render_text(text, context=None):
    """Render a raw template string against the provided context."""
    tmpl = template.Template(text)
    return tmpl.render(template.Context(context))
# endregion

View File

@ -1,257 +0,0 @@
"""Plugin model definitions."""
import inspect
import warnings
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from django.db import models
from django.db.utils import IntegrityError
from django.utils.translation import gettext_lazy as _
import common.models
import InvenTree.models
from plugin import InvenTreePlugin, registry
class PluginConfig(InvenTree.models.MetadataMixin, models.Model):
    """A PluginConfig object holds settings for plugins.

    Attributes:
        key: slug of the plugin (this must be unique across all installed plugins!)
        name: PluginName of the plugin - serves for a manual double check if the right plugin is used
        active: Should the plugin be loaded?
    """

    class Meta:
        """Meta for PluginConfig."""

        verbose_name = _('Plugin Configuration')
        verbose_name_plural = _('Plugin Configurations')

    key = models.CharField(
        unique=True, max_length=255, verbose_name=_('Key'), help_text=_('Key of plugin')
    )

    name = models.CharField(
        null=True,
        blank=True,
        max_length=255,
        verbose_name=_('Name'),
        help_text=_('PluginName of the plugin'),
    )

    package_name = models.CharField(
        null=True,
        blank=True,
        max_length=255,
        verbose_name=_('Package Name'),
        help_text=_(
            'Name of the installed package, if the plugin was installed via PIP'
        ),
    )

    active = models.BooleanField(
        default=False, verbose_name=_('Active'), help_text=_('Is the plugin active')
    )

    def __str__(self) -> str:
        """Nice name for printing."""
        name = f'{self.name} - {self.key}'
        if not self.active:
            name += '(not active)'
        return name

    # extra attributes from the registry
    def mixins(self):
        """Returns all registered mixins."""
        try:
            # self.plugin may be either a class (not yet instantiated) or an instance
            if inspect.isclass(self.plugin):
                return self.plugin.get_registered_mixins(
                    self, with_base=True, with_cls=False
                )
            return self.plugin.get_registered_mixins(with_base=True, with_cls=False)
        except (AttributeError, ValueError):  # pragma: no cover
            return {}

    # functions

    def __init__(self, *args, **kwargs):
        """Override to set original state of the plugin-config instance."""
        super().__init__(*args, **kwargs)
        # Remember the loaded 'active' state so save() can detect changes
        self.__org_active = self.active

        # Append settings from registry
        plugin = registry.plugins_full.get(self.key, None)

        def get_plugin_meta(name):
            """Return a meta-value associated with this plugin."""
            # Ignore if the plugin config is not defined
            if not plugin:
                return None

            # Ignore if the plugin is not active
            if not self.active:
                return None

            result = getattr(plugin, name, None)
            if result is not None:
                # Coerce to string (values may be lazy/translated objects)
                result = str(result)
            return result

        self.meta = {
            key: get_plugin_meta(key)
            for key in [
                'slug',
                'human_name',
                'description',
                'author',
                'pub_date',
                'version',
                'website',
                'license',
                'package_path',
                'settings_url',
            ]
        }

        # Save plugin
        self.plugin: InvenTreePlugin = plugin

    def __getstate__(self):
        """Customize pickling behavior."""
        state = super().__getstate__()
        state.pop(
            'plugin', None
        )  # plugin cannot be pickled in some circumstances when used with drf views, remove it (#5408)
        return state

    def save(self, force_insert=False, force_update=False, *args, **kwargs):
        """Extend save method to reload plugins if the 'active' status changes."""
        reload = kwargs.pop('no_reload', False)  # check if no_reload flag is set

        super().save(force_insert, force_update, *args, **kwargs)

        if self.is_builtin():
            # Force active if builtin
            # NOTE(review): this assignment happens *after* super().save(), so the
            # forced value is not persisted on this call - confirm intended
            self.active = True

        if not reload:
            if self.active != self.__org_active:
                if settings.PLUGIN_TESTING:
                    warnings.warn('A reload was triggered', stacklevel=2)
                registry.reload_plugins()

    @admin.display(boolean=True, description=_('Installed'))
    def is_installed(self) -> bool:
        """Simple check to determine if this plugin is installed.

        A plugin might not be installed if it has been removed from the system,
        but the PluginConfig associated with it still exists.
        """
        return self.plugin is not None

    @admin.display(boolean=True, description=_('Sample plugin'))
    def is_sample(self) -> bool:
        """Is this plugin a sample app?"""
        if not self.plugin:
            return False

        return self.plugin.check_is_sample()

    @admin.display(boolean=True, description=_('Builtin Plugin'))
    def is_builtin(self) -> bool:
        """Return True if this is a 'builtin' plugin."""
        if not self.plugin:
            return False

        return self.plugin.check_is_builtin()

    @admin.display(boolean=True, description=_('Package Plugin'))
    def is_package(self) -> bool:
        """Return True if this is a 'package' plugin."""
        if not self.plugin:
            return False

        return getattr(self.plugin, 'is_package', False)
class PluginSetting(common.models.BaseInvenTreeSetting):
    """This model represents settings for individual plugins."""

    # Discriminator used by the BaseInvenTreeSetting machinery
    typ = 'plugin'
    extra_unique_fields = ['plugin']

    class Meta:
        """Meta for PluginSetting."""

        unique_together = [('plugin', 'key')]

    plugin = models.ForeignKey(
        PluginConfig,
        related_name='settings',
        null=False,
        verbose_name=_('Plugin'),
        on_delete=models.CASCADE,
    )

    @classmethod
    def get_setting_definition(cls, key, **kwargs):
        """In the BaseInvenTreeSetting class, we have a class attribute named 'SETTINGS', which is a dict object that fully defines all the setting parameters.

        Here, unlike the BaseInvenTreeSetting, we do not know the definitions of all settings
        'ahead of time' (as they are defined externally in the plugins).

        Settings can be provided by the caller, as kwargs['settings'].

        If not provided, we'll look at the plugin registry to see what settings are available,
        (if the plugin is specified!)
        """
        if 'settings' not in kwargs:
            plugin = kwargs.pop('plugin', None)

            if plugin:
                # Pull the setting definitions for this plugin from the registry
                mixin_settings = getattr(registry, 'mixins_settings', None)
                if mixin_settings:
                    kwargs['settings'] = mixin_settings.get(plugin.key, {})

        return super().get_setting_definition(key, **kwargs)
class NotificationUserSetting(common.models.BaseInvenTreeSetting):
    """This model represents notification settings for a user."""

    # Discriminator used by the BaseInvenTreeSetting machinery
    typ = 'notification'
    extra_unique_fields = ['method', 'user']

    class Meta:
        """Meta for NotificationUserSetting."""

        unique_together = [('method', 'user', 'key')]

    @classmethod
    def get_setting_definition(cls, key, **kwargs):
        """Override setting_definition to use notification settings."""
        from common.notifications import storage

        # Setting definitions are supplied by the notification storage backend
        kwargs['settings'] = storage.user_settings

        return super().get_setting_definition(key, **kwargs)

    # Name of the notification delivery method this setting applies to
    method = models.CharField(max_length=255, verbose_name=_('Method'))

    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        verbose_name=_('User'),
        help_text=_('User'),
    )

    def __str__(self) -> str:
        """Nice name of printing."""
        return f'{self.key} (for {self.user}): {self.value}'

View File

@ -1,833 +0,0 @@
"""Registry for loading and managing multiple plugins at run-time.
- Holds the class and the object that contains all code to maintain plugin states
- Manages setup and teardown of plugin class instances
"""
import imp
import importlib
import logging
import os
import time
from collections import OrderedDict
from pathlib import Path
from threading import Lock
from typing import Any
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.urls import clear_url_caches, path
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from InvenTree.config import get_plugin_dir
from InvenTree.ready import canAppAccessDatabase
from .helpers import (
IntegrationPluginError,
get_entrypoints,
get_plugins,
handle_error,
log_error,
)
from .plugin import InvenTreePlugin
logger = logging.getLogger('inventree')
class PluginsRegistry:
"""The PluginsRegistry class."""
from .base.integration.AppMixin import AppMixin
from .base.integration.ScheduleMixin import ScheduleMixin
from .base.integration.SettingsMixin import SettingsMixin
from .base.integration.UrlsMixin import UrlsMixin
DEFAULT_MIXIN_ORDER = [SettingsMixin, ScheduleMixin, AppMixin, UrlsMixin]
def __init__(self) -> None:
"""Initialize registry.
Set up all needed references for internal and external states.
"""
# plugin registry
self.plugins: dict[str, InvenTreePlugin] = {} # List of active instances
self.plugins_inactive: dict[
str, InvenTreePlugin
] = {} # List of inactive instances
self.plugins_full: dict[
str, InvenTreePlugin
] = {} # List of all plugin instances
# Keep an internal hash of the plugin registry state
self.registry_hash = None
self.plugin_modules: list[InvenTreePlugin] = [] # Holds all discovered plugins
self.mixin_modules: dict[str, Any] = {} # Holds all discovered mixins
self.errors = {} # Holds discovering errors
self.loading_lock = Lock() # Lock to prevent multiple loading at the same time
# flags
self.plugins_loaded = (
False # Marks if the registry fully loaded and all django apps are reloaded
)
self.apps_loading = True # Marks if apps were reloaded yet
self.installed_apps = [] # Holds all added plugin_paths
@property
def is_loading(self):
"""Return True if the plugin registry is currently loading."""
return self.loading_lock.locked()
def get_plugin(self, slug, active=None):
"""Lookup plugin by slug (unique key)."""
# Check if the registry needs to be reloaded
self.check_reload()
if slug not in self.plugins:
logger.warning("Plugin registry has no record of plugin '%s'", slug)
return None
plg = self.plugins[slug]
if active is not None:
if active != plg.is_active():
return None
return plg
def get_plugin_config(self, slug: str, name: [str, None] = None):
"""Return the matching PluginConfig instance for a given plugin.
Args:
slug: The plugin slug
name: The plugin name (optional)
"""
import InvenTree.ready
from plugin.models import PluginConfig
if InvenTree.ready.isImportingData():
return None
try:
cfg = PluginConfig.objects.filter(key=slug).first()
if not cfg:
cfg = PluginConfig.objects.create(key=slug)
except PluginConfig.DoesNotExist:
return None
except (IntegrityError, OperationalError, ProgrammingError): # pragma: no cover
return None
if name and cfg.name != name:
# Update the name if it has changed
try:
cfg.name = name
cfg.save()
except Exception as e:
logger.exception('Failed to update plugin name')
return cfg
def set_plugin_state(self, slug, state):
"""Set the state(active/inactive) of a plugin.
Args:
slug (str): Plugin slug
state (bool): Plugin state - true = active, false = inactive
"""
# Check if the registry needs to be reloaded
self.check_reload()
if slug not in self.plugins_full:
logger.warning("Plugin registry has no record of plugin '%s'", slug)
return
cfg = self.get_plugin_config(slug)
cfg.active = state
cfg.save()
# Update the registry hash value
self.update_plugin_hash()
def call_plugin_function(self, slug, func, *args, **kwargs):
"""Call a member function (named by 'func') of the plugin named by 'slug'.
As this is intended to be run by the background worker,
we do not perform any try/except here.
Instead, any error messages are returned to the worker.
"""
# Check if the registry needs to be reloaded
self.check_reload()
plugin = self.get_plugin(slug)
if not plugin:
return
plugin_func = getattr(plugin, func)
return plugin_func(*args, **kwargs)
# region registry functions
def with_mixin(self, mixin: str, active=True, builtin=None):
"""Returns reference to all plugins that have a specified mixin enabled.
Args:
mixin (str): Mixin name
active (bool, optional): Filter by 'active' status of plugin. Defaults to True.
builtin (bool, optional): Filter by 'builtin' status of plugin. Defaults to None.
"""
# Check if the registry needs to be loaded
self.check_reload()
result = []
for plugin in self.plugins.values():
if plugin.mixin_enabled(mixin):
if active is not None:
# Filter by 'active' status of plugin
if active != plugin.is_active():
continue
if builtin is not None:
# Filter by 'builtin' status of plugin
if builtin != plugin.is_builtin:
continue
result.append(plugin)
return result
# endregion
# region loading / unloading
    def _load_plugins(self, full_reload: bool = False):
        """Load and activate all IntegrationPlugins.

        Retries loading up to settings.PLUGIN_RETRY times: if a plugin raises
        an IntegrationPluginError, that plugin path is blocked and the loop
        re-initializes without it.

        Args:
            full_reload (bool, optional): Reload everything - including plugin mechanism. Defaults to False.
        """
        logger.info('Loading plugins')

        registered_successful = False
        blocked_plugin = None  # Path of a plugin which errored and must be skipped
        retry_counter = settings.PLUGIN_RETRY

        while not registered_successful:
            try:
                # We are using the db so for migrations etc we need to try this block
                self._init_plugins(blocked_plugin)
                self._activate_plugins(full_reload=full_reload)
                registered_successful = True
            except (OperationalError, ProgrammingError):  # pragma: no cover
                # Exception if the database has not been migrated yet
                logger.info('Database not accessible while loading plugins')
                break
            except IntegrationPluginError as error:
                logger.exception(
                    '[PLUGIN] Encountered an error with %s:\n%s',
                    error.path,
                    error.message,
                )
                log_error({error.path: error.message}, 'load')
                blocked_plugin = error.path  # we will not try to load this app again

                # Initialize apps without any plugins
                self._clean_registry()
                self._clean_installed_apps()
                self._activate_plugins(force_reload=True, full_reload=full_reload)

                # We do not want to end in an endless loop
                retry_counter -= 1

                if retry_counter <= 0:  # pragma: no cover
                    if settings.PLUGIN_TESTING:
                        print('[PLUGIN] Max retries, breaking loading')
                    break
                if settings.PLUGIN_TESTING:
                    print(
                        f'[PLUGIN] Above error occurred during testing - {retry_counter}/{settings.PLUGIN_RETRY} retries left'
                    )

                # now the loading will re-start up with init

            # disable full reload after the first round
            if full_reload:
                full_reload = False

        # ensure plugins_loaded is True
        self.plugins_loaded = True

        logger.debug('Finished loading plugins')

        # Trigger plugins_loaded event (only if the database is reachable)
        if canAppAccessDatabase():
            from plugin.events import trigger_event

            trigger_event('plugins_loaded')
def _unload_plugins(self, force_reload: bool = False):
"""Unload and deactivate all IntegrationPlugins.
Args:
force_reload (bool, optional): Also reload base apps. Defaults to False.
"""
logger.info('Start unloading plugins')
# remove all plugins from registry
self._clean_registry()
# deactivate all integrations
self._deactivate_plugins(force_reload=force_reload)
logger.info('Finished unloading plugins')
def reload_plugins(
self,
full_reload: bool = False,
force_reload: bool = False,
collect: bool = False,
):
"""Reload the plugin registry.
This should be considered the single point of entry for loading plugins!
Args:
full_reload (bool, optional): Reload everything - including plugin mechanism. Defaults to False.
force_reload (bool, optional): Also reload base apps. Defaults to False.
collect (bool, optional): Collect plugins before reloading. Defaults to False.
"""
# Do not reload when currently loading
if self.is_loading:
logger.debug('Skipping reload - plugin registry is currently loading')
return
if self.loading_lock.acquire(blocking=False):
logger.info(
'Plugin Registry: Reloading plugins - Force: %s, Full: %s, Collect: %s',
force_reload,
full_reload,
collect,
)
if collect:
logger.info('Collecting plugins')
self.plugin_modules = self.collect_plugins()
self.plugins_loaded = False
self._unload_plugins(force_reload=force_reload)
self.plugins_loaded = True
self._load_plugins(full_reload=full_reload)
self.update_plugin_hash()
self.loading_lock.release()
logger.info('Plugin Registry: Loaded %s plugins', len(self.plugins))
    def plugin_dirs(self):
        """Construct a list of directories from where plugins can be loaded.

        Returns:
            list: Python dot-paths (or absolute paths) to search for plugins.
        """
        # Builtin plugins are *always* loaded
        dirs = ['plugin.builtin']

        if settings.PLUGINS_ENABLED:
            # Any 'external' plugins are only loaded if PLUGINS_ENABLED is set to True

            if settings.TESTING or settings.DEBUG:
                # If in TEST or DEBUG mode, load plugins from the 'samples' directory
                dirs.append('plugin.samples')

            if settings.TESTING:
                custom_dirs = os.getenv('INVENTREE_PLUGIN_TEST_DIR', None)
            else:  # pragma: no cover
                custom_dirs = get_plugin_dir()

                # Load from user specified directories (unless in testing mode)
                dirs.append('plugins')

            if custom_dirs is not None:
                # Allow multiple plugin directories to be specified (comma separated)
                for pd_text in custom_dirs.split(','):
                    pd = Path(pd_text.strip()).absolute()

                    # Attempt to create the directory if it does not already exist
                    if not pd.exists():
                        try:
                            pd.mkdir(exist_ok=True)
                        except Exception:  # pragma: no cover
                            logger.exception(
                                "Could not create plugin directory '%s'", pd
                            )
                            continue

                    # Ensure the directory has an __init__.py file,
                    # so it can be imported as a python package
                    init_filename = pd.joinpath('__init__.py')

                    if not init_filename.exists():
                        try:
                            init_filename.write_text('# InvenTree plugin directory\n')
                        except Exception:  # pragma: no cover
                            logger.exception(
                                "Could not create file '%s'", init_filename
                            )
                            continue

                    # By this point, we have confirmed that the directory at least exists
                    if pd.exists() and pd.is_dir():
                        # Convert to python dot-path
                        if pd.is_relative_to(settings.BASE_DIR):
                            pd_path = '.'.join(pd.relative_to(settings.BASE_DIR).parts)
                        else:
                            pd_path = str(pd)

                        # Add path
                        dirs.append(pd_path)
                        logger.info("Added plugin directory: '%s' as '%s'", pd, pd_path)

        return dirs
def collect_plugins(self):
"""Collect plugins from all possible ways of loading. Returned as list."""
collected_plugins = []
# Collect plugins from paths
for plugin in self.plugin_dirs():
logger.debug("Loading plugins from directory '%s'", plugin)
parent_path = None
parent_obj = Path(plugin)
# If a "path" is provided, some special handling is required
if parent_obj.name is not plugin and len(parent_obj.parts) > 1:
# Ensure PosixPath object is converted to a string, before passing to get_plugins
parent_path = str(parent_obj.parent)
plugin = parent_obj.name
# Gather Modules
if parent_path:
raw_module = imp.load_source(
plugin, str(parent_obj.joinpath('__init__.py'))
)
else:
raw_module = importlib.import_module(plugin)
modules = get_plugins(raw_module, InvenTreePlugin, path=parent_path)
for item in modules or []:
collected_plugins.append(item)
# From this point any plugins are considered "external" and only loaded if plugins are explicitly enabled
if settings.PLUGINS_ENABLED:
# Check if not running in testing mode and apps should be loaded from hooks
if (not settings.PLUGIN_TESTING) or (
settings.PLUGIN_TESTING and settings.PLUGIN_TESTING_SETUP
):
# Collect plugins from setup entry points
for entry in get_entrypoints():
try:
plugin = entry.load()
plugin.is_package = True
plugin.package_name = getattr(entry.dist, 'name', None)
plugin._get_package_metadata()
collected_plugins.append(plugin)
except Exception as error: # pragma: no cover
handle_error(error, do_raise=False, log_name='discovery')
# Log collected plugins
logger.info('Collected %s plugins', len(collected_plugins))
logger.debug(', '.join([a.__module__ for a in collected_plugins]))
return collected_plugins
def discover_mixins(self):
"""Discover all mixins from plugins and register them."""
collected_mixins = {}
for plg in self.plugins.values():
collected_mixins.update(plg.get_registered_mixins())
self.mixin_modules = collected_mixins
def install_plugin_file(self):
"""Make sure all plugins are installed in the current environment."""
if settings.PLUGIN_FILE_CHECKED:
logger.info('Plugin file was already checked')
return True
from plugin.installer import install_plugins_file
if install_plugins_file():
settings.PLUGIN_FILE_CHECKED = True
return 'first_run'
return False
# endregion
# region general internal loading / activating / deactivating / unloading
    def _init_plugins(self, disabled: str = None):
        """Initialise all found plugins.

        Iterates over self.plugin_modules, creating/fetching a PluginConfig
        for each, instantiating those which should be loaded, and recording
        every plugin (active or inactive) in the registry dicts.

        Args:
            disabled (str, optional): Loading path of disabled app. Defaults to None.

        Raises:
            error: IntegrationPluginError
        """
        # Imports need to be in this level to prevent early db model imports
        from InvenTree import version
        from plugin.models import PluginConfig

        def safe_reference(plugin, key: str, active: bool = True):
            """Safe reference to plugin dicts."""
            if active:
                self.plugins[key] = plugin
            else:
                # Deactivate plugin in db (if currently set as active)
                if not settings.PLUGIN_TESTING and plugin.db.active:  # pragma: no cover
                    plugin.db.active = False
                    plugin.db.save(no_reload=True)
                self.plugins_inactive[key] = plugin.db

            self.plugins_full[key] = plugin

        logger.debug('Starting plugin initialization')

        # Fetch and cache list of existing plugin configuration instances
        plugin_configs = {cfg.key: cfg for cfg in PluginConfig.objects.all()}

        # Initialize plugins
        for plg in self.plugin_modules:
            # These checks only use attributes - never use plugin supplied functions -> that would lead to arbitrary code execution!!
            plg_name = plg.NAME
            plg_key = slugify(
                plg.SLUG if getattr(plg, 'SLUG', None) else plg_name
            )  # keys are slugs!

            try:
                if plg_key in plugin_configs:
                    # Configuration already exists
                    plg_db = plugin_configs[plg_key]
                else:
                    # Configuration needs to be created
                    plg_db = self.get_plugin_config(plg_key, plg_name)
            except (OperationalError, ProgrammingError) as error:
                # Exception if the database has not been migrated yet - check if test are running - raise if not
                if not settings.PLUGIN_TESTING:
                    raise error  # pragma: no cover
                plg_db = None
            except IntegrityError as error:  # pragma: no cover
                logger.exception('Error initializing plugin `%s`: %s', plg_name, error)
                handle_error(error, log_name='init')

            # Append reference to plugin
            plg.db = plg_db

            # Check if this is a 'builtin' plugin
            builtin = plg.check_is_builtin()

            package_name = None

            # Extract plugin package name
            if getattr(plg, 'is_package', False):
                package_name = getattr(plg, 'package_name', None)

            # Auto-enable builtin plugins
            if builtin and plg_db and not plg_db.active:
                plg_db.active = True
                plg_db.save()

            # Save the package_name attribute to the plugin
            # NOTE(review): plg_db may be None here (db not migrated while testing)
            # - confirm this branch cannot be reached in that state
            if plg_db.package_name != package_name:
                plg_db.package_name = package_name
                plg_db.save()

            # Determine if this plugin should be loaded:
            # - If PLUGIN_TESTING is enabled
            # - If this is a 'builtin' plugin
            # - If this plugin has been explicitly enabled by the user
            if settings.PLUGIN_TESTING or builtin or (plg_db and plg_db.active):
                # Check if the plugin was blocked -> threw an error; option1: package, option2: file-based
                if disabled and disabled in (plg.__name__, plg.__module__):
                    safe_reference(plugin=plg, key=plg_key, active=False)
                    continue  # continue -> the plugin is not loaded

                # Initialize package - we can be sure that an admin has activated the plugin
                logger.debug('Loading plugin `%s`', plg_name)

                try:
                    t_start = time.time()
                    plg_i: InvenTreePlugin = plg()
                    dt = time.time() - t_start
                    logger.debug('Loaded plugin `%s` in %.3fs', plg_name, dt)
                except Exception as error:
                    # handle_error re-raises by default, so plg_i is only used
                    # below when instantiation succeeded
                    handle_error(
                        error, log_name='init'
                    )  # log error and raise it -> disable plugin
                    logger.warning('Plugin `%s` could not be loaded', plg_name)

                # Safe extra attributes
                plg_i.is_package = getattr(plg_i, 'is_package', False)

                plg_i.pk = plg_db.pk if plg_db else None
                plg_i.db = plg_db

                # Run version check for plugin
                if (
                    plg_i.MIN_VERSION or plg_i.MAX_VERSION
                ) and not plg_i.check_version():
                    # Disable plugin
                    safe_reference(plugin=plg_i, key=plg_key, active=False)

                    p = plg_name
                    v = version.inventreeVersion()
                    _msg = _(
                        f"Plugin '{p}' is not compatible with the current InvenTree version {v}"
                    )
                    if v := plg_i.MIN_VERSION:
                        _msg += _(f'Plugin requires at least version {v}')
                    if v := plg_i.MAX_VERSION:
                        _msg += _(f'Plugin requires at most version {v}')

                    # Log to error stack
                    log_error(_msg, reference='init')
                else:
                    safe_reference(plugin=plg_i, key=plg_key)
            else:  # pragma: no cover
                safe_reference(plugin=plg, key=plg_key, active=False)
def __get_mixin_order(self):
"""Returns a list of mixin classes, in the order that they should be activated."""
# Preset list of mixins
order = self.DEFAULT_MIXIN_ORDER
# Append mixins that are not defined in the default list
order += [
m.get('cls')
for m in self.mixin_modules.values()
if m.get('cls') not in order
]
# Final list of mixins
return order
def _activate_plugins(self, force_reload=False, full_reload: bool = False):
"""Run activation functions for all plugins.
Args:
force_reload (bool, optional): Also reload base apps. Defaults to False.
full_reload (bool, optional): Reload everything - including plugin mechanism. Defaults to False.
"""
# Collect mixins
self.discover_mixins()
# Activate integrations
plugins = self.plugins.items()
logger.info('Found %s active plugins', len(plugins))
for mixin in self.__get_mixin_order():
if hasattr(mixin, '_activate_mixin'):
mixin._activate_mixin(
self, plugins, force_reload=force_reload, full_reload=full_reload
)
logger.debug('Done activating')
def _deactivate_plugins(self, force_reload: bool = False):
"""Run deactivation functions for all plugins.
Args:
force_reload (bool, optional): Also reload base apps. Defaults to False.
"""
for mixin in reversed(self.__get_mixin_order()):
if hasattr(mixin, '_deactivate_mixin'):
mixin._deactivate_mixin(self, force_reload=force_reload)
logger.debug('Finished deactivating plugins')
# endregion
# region mixin specific loading ...
def _try_reload(self, cmd, *args, **kwargs):
"""Wrapper to try reloading the apps.
Throws an custom error that gets handled by the loading function.
"""
try:
cmd(*args, **kwargs)
return True, []
except Exception as error: # pragma: no cover
handle_error(error, do_raise=False)
    def _reload_apps(self, force_reload: bool = False, full_reload: bool = False):
        """Internal: reload apps using django internal functions.

        Args:
            force_reload (bool, optional): Also reload base apps. Defaults to False.
            full_reload (bool, optional): Reload everything - including plugin mechanism. Defaults to False.
                NOTE(review): this parameter is accepted but not used in this
                method body - confirm whether it is intentional.
        """
        if force_reload:
            # we can not use the built in functions as we need to brute force the registry
            # Resetting these flags forces apps.populate() to run from scratch
            apps.app_configs = OrderedDict()
            apps.apps_ready = apps.models_ready = apps.loading = apps.ready = False
            apps.clear_cache()
            self._try_reload(apps.populate, settings.INSTALLED_APPS)
        else:
            self._try_reload(apps.set_installed_apps, settings.INSTALLED_APPS)
    def _clean_installed_apps(self):
        """Remove all plugin apps which were previously added to settings.INSTALLED_APPS."""
        for plugin in self.installed_apps:
            if plugin in settings.INSTALLED_APPS:
                settings.INSTALLED_APPS.remove(plugin)

        # Reset the tracking list
        self.installed_apps = []
def _clean_registry(self):
"""Remove all plugins from registry."""
self.plugins: dict[str, InvenTreePlugin] = {}
self.plugins_inactive: dict[str, InvenTreePlugin] = {}
self.plugins_full: dict[str, InvenTreePlugin] = {}
    def _update_urls(self):
        """Due to the order in which plugins are loaded, the patterns in urls.py may be out of date.

        This function updates the patterns in urls.py to ensure that the correct patterns are loaded,
        and then refreshes the django url cache.

        Note that we also have to refresh the admin site URLS,
        as any custom AppMixin plugins require admin integration
        """
        from InvenTree.urls import urlpatterns
        from plugin.urls import get_plugin_urls

        # Replace the stale entries in-place, so existing references to the
        # urlpatterns list pick up the new resolvers
        for index, url in enumerate(urlpatterns):
            app_name = getattr(url, 'app_name', None)

            admin_url = settings.INVENTREE_ADMIN_URL

            if app_name == 'admin':
                urlpatterns[index] = path(
                    f'{admin_url}/', admin.site.urls, name='inventree-admin'
                )

            if app_name == 'plugin':
                urlpatterns[index] = get_plugin_urls()

        # Refresh the URL cache
        clear_url_caches()
# endregion
# region plugin registry hash calculations
    def update_plugin_hash(self):
        """When the state of the plugin registry changes, update the hash.

        Persists the new hash to the database so that other processes can
        detect the change (see check_reload).
        """
        from common.models import InvenTreeSetting

        # Recalculate the hash from the current registry state
        self.registry_hash = self.calculate_plugin_hash()

        try:
            old_hash = InvenTreeSetting.get_setting(
                '_PLUGIN_REGISTRY_HASH', '', create=False, cache=False
            )
        except Exception:
            # Database may not be available - fall back to an empty hash
            old_hash = ''

        if old_hash != self.registry_hash:
            try:
                logger.debug(
                    'Updating plugin registry hash: %s', str(self.registry_hash)
                )
                InvenTreeSetting.set_setting(
                    '_PLUGIN_REGISTRY_HASH', self.registry_hash, change_user=None
                )
            except (OperationalError, ProgrammingError):
                # Exception if the database has not been migrated yet, or is not ready
                pass
            except Exception as exc:
                # Some other exception, we want to know about it
                logger.exception('Failed to update plugin registry hash: %s', str(exc))
def calculate_plugin_hash(self):
"""Calculate a 'hash' value for the current registry.
This is used to detect changes in the plugin registry,
and to inform other processes that the plugin registry has changed
"""
from hashlib import md5
from common.models import InvenTreeSetting
data = md5()
# Hash for all loaded plugins
for slug, plug in self.plugins.items():
data.update(str(slug).encode())
data.update(str(plug.name).encode())
data.update(str(plug.version).encode())
data.update(str(plug.is_active()).encode())
# Also hash for all config settings which define plugin behavior
keys = [
'ENABLE_PLUGINS_URL',
'ENABLE_PLUGINS_NAVIGATION',
'ENABLE_PLUGINS_APP',
'ENABLE_PLUGINS_SCHEDULE',
'ENABLE_PLUGINS_EVENTS',
]
for k in keys:
try:
data.update(
str(
InvenTreeSetting.get_setting(
k, False, cache=False, create=False
)
).encode()
)
except Exception:
pass
return str(data.hexdigest())
    def check_reload(self):
        """Determine if the registry needs to be reloaded.

        Compares the locally cached registry hash against the hash stored in
        the database; a mismatch means another process changed the plugin
        state, and triggers a full reload.
        """
        from common.models import InvenTreeSetting

        if settings.TESTING:
            # Skip if running during unit testing
            return

        if not canAppAccessDatabase(allow_shell=True):
            # Skip check if database cannot be accessed
            return

        logger.debug('Checking plugin registry hash')

        # If not already cached, calculate the hash
        if not self.registry_hash:
            self.registry_hash = self.calculate_plugin_hash()

        try:
            reg_hash = InvenTreeSetting.get_setting(
                '_PLUGIN_REGISTRY_HASH', '', create=False, cache=False
            )
        except Exception as exc:
            logger.exception('Failed to retrieve plugin registry hash: %s', str(exc))
            return

        if reg_hash and reg_hash != self.registry_hash:
            logger.info('Plugin registry hash has changed - reloading')
            self.reload_plugins(full_reload=True, force_reload=True, collect=True)
# endregion
registry: PluginsRegistry = PluginsRegistry()
def call_function(plugin_name, function_name, *args, **kwargs):
    """Global helper function to call a specific member function of a plugin.

    Args:
        plugin_name: Slug of the target plugin.
        function_name: Name of the member function to invoke.

    Returns:
        Whatever the named plugin function returns (None if the plugin is unknown).
    """
    return registry.call_plugin_function(plugin_name, function_name, *args, **kwargs)

View File

@ -1,152 +0,0 @@
"""Sample plugin which demonstrates custom validation functionality."""
from datetime import datetime
from plugin import InvenTreePlugin
from plugin.mixins import SettingsMixin, ValidationMixin
class SampleValidatorPlugin(SettingsMixin, ValidationMixin, InvenTreePlugin):
    """A sample plugin class for demonstrating custom validation functions.

    Simple examples of custom validator code.
    """

    NAME = 'SampleValidator'
    SLUG = 'validator'
    TITLE = 'Sample Validator Plugin'
    DESCRIPTION = 'A sample plugin for demonstrating custom validation functionality'
    VERSION = '0.3.0'

    # User-configurable settings which enable/control the individual validation rules below
    SETTINGS = {
        'ILLEGAL_PART_CHARS': {
            'name': 'Illegal Part Characters',
            'description': 'Characters which are not allowed to appear in Part names',
            'default': '!@#$%^&*()~`',
        },
        'IPN_MUST_CONTAIN_Q': {
            'name': 'IPN Q Requirement',
            'description': 'Part IPN field must contain the character Q',
            'default': False,
            'validator': bool,
        },
        'SERIAL_MUST_BE_PALINDROME': {
            'name': 'Palindromic Serials',
            'description': 'Serial numbers must be palindromic',
            'default': False,
            'validator': bool,
        },
        'SERIAL_MUST_MATCH_PART': {
            'name': 'Serial must match part',
            'description': 'First letter of serial number must match first letter of part name',
            'default': False,
            'validator': bool,
        },
        'BATCH_CODE_PREFIX': {
            'name': 'Batch prefix',
            'description': 'Required prefix for batch code',
            'default': 'B',
        },
        'BOM_ITEM_INTEGER': {
            'name': 'Integer Bom Quantity',
            'description': 'Bom item quantity must be an integer',
            'default': False,
            'validator': bool,
        },
    }

    def validate_model_instance(self, instance, deltas=None):
        """Run validation against any saved model.

        - Check if the instance is a BomItem object
        - Test if the quantity is an integer

        Args:
            instance: The model instance being validated.
            deltas: Optional dict of changed fields, mapping field name to
                a dict with 'old' and 'new' values.
        """
        import part.models

        # Print debug message to console (intentional)
        print('Validating model instance:', instance.__class__, f'<{instance.pk}>')

        if isinstance(instance, part.models.BomItem):
            if self.get_setting('BOM_ITEM_INTEGER'):
                # Reject fractional quantities when the setting is enabled
                if float(instance.quantity) != int(instance.quantity):
                    self.raise_error({
                        'quantity': 'Bom item quantity must be an integer'
                    })

        if isinstance(instance, part.models.Part):
            # If the part description is being updated, prevent it from being reduced in length
            if deltas and 'description' in deltas:
                old_desc = deltas['description']['old']
                new_desc = deltas['description']['new']

                if len(new_desc) < len(old_desc):
                    self.raise_error({
                        'description': 'Part description cannot be shortened'
                    })

    def validate_part_name(self, name: str, part):
        """Custom validation for Part name field.

        Rules:
        - Name must be shorter than the description field
        - Name cannot contain illegal characters

        These examples are silly, but serve to demonstrate how the feature could be used.
        """
        if len(part.description) < len(name):
            self.raise_error('Part description cannot be shorter than the name')

        illegal_chars = self.get_setting('ILLEGAL_PART_CHARS')

        # Reject the name on the first illegal character found
        for c in illegal_chars:
            if c in name:
                self.raise_error(f"Illegal character in part name: '{c}'")

    def validate_part_ipn(self, ipn: str, part):
        """Validate part IPN.

        These examples are silly, but serve to demonstrate how the feature could be used
        """
        if self.get_setting('IPN_MUST_CONTAIN_Q') and 'Q' not in ipn:
            self.raise_error("IPN must contain 'Q'")

    def validate_part_parameter(self, parameter, data):
        """Validate part parameter data.

        These examples are silly, but serve to demonstrate how the feature could be used
        """
        # Only applies to 'length' / 'width' parameter templates
        if parameter.template.name.lower() in ['length', 'width']:
            # NOTE(review): int(data) raises ValueError for non-integer strings
            # (e.g. '10.5') - confirm whether float parsing was intended here
            d = int(data)
            if d >= 100:
                self.raise_error('Value must be less than 100')

    def validate_serial_number(self, serial: str, part):
        """Validate serial number for a given StockItem.

        These examples are silly, but serve to demonstrate how the feature could be used
        """
        if self.get_setting('SERIAL_MUST_BE_PALINDROME'):
            if serial != serial[::-1]:
                self.raise_error('Serial must be a palindrome')

        if self.get_setting('SERIAL_MUST_MATCH_PART'):
            # Serial must start with the same letter as the linked part, for some reason
            # NOTE(review): serial[0] / part.name[0] raise IndexError for empty
            # strings - presumably upstream validation guarantees non-empty values
            if serial[0] != part.name[0]:
                self.raise_error('Serial number must start with same letter as part')

    def validate_batch_code(self, batch_code: str, item):
        """Ensure that a particular batch code meets specification.

        These examples are silly, but serve to demonstrate how the feature could be used
        """
        prefix = self.get_setting('BATCH_CODE_PREFIX')

        # Empty batch codes are allowed; only non-empty codes are checked
        if len(batch_code) > 0:
            if prefix and not batch_code.startswith(prefix):
                self.raise_error(f"Batch code must start with '{prefix}'")

    def generate_batch_code(self):
        """Generate a new batch code.

        Returns a string of the form 'BATCH-<year>:<month>:<day>' based on
        the current local date.
        """
        now = datetime.now()
        return f'BATCH-{now.year}:{now.month}:{now.day}'

View File

@ -1,28 +0,0 @@
# Generated by Django 3.2.23 on 2023-12-18 18:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add test-station and start/finish timestamp fields to StockItemTestResult.

    NOTE(review): the DateTimeField additions specify blank=True but not
    null=True - confirm how existing rows are populated when this migration
    is applied to a non-empty table.
    """

    dependencies = [
        ('stock', '0108_auto_20240219_0252'),
    ]

    operations = [
        migrations.AddField(
            model_name='stockitemtestresult',
            name='finished_datetime',
            field=models.DateTimeField(blank=True, help_text='The timestamp of the test finish', verbose_name='Finished'),
        ),
        migrations.AddField(
            model_name='stockitemtestresult',
            name='started_datetime',
            field=models.DateTimeField(blank=True, help_text='The timestamp of the test start', verbose_name='Started'),
        ),
        migrations.AddField(
            model_name='stockitemtestresult',
            name='test_station',
            field=models.CharField(blank=True, help_text='The identifier of the test station where the test was performed', max_length=500, verbose_name='Test station'),
        ),
    ]

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,7 @@
# Web process: gunicorn
web: env/bin/gunicorn --chdir $APP_HOME/InvenTree -c InvenTree/gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:$PORT
web: env/bin/gunicorn --chdir $APP_HOME/src/backend/InvenTree -c src/backend/InvenTree/gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:$PORT
# Worker process: qcluster
worker: env/bin/python InvenTree/manage.py qcluster
worker: env/bin/python src/backend/InvenTree/manage.py qcluster
# Invoke commands
invoke: echo "" | echo "" && . env/bin/activate && invoke
# CLI: Provided for backwards compatibility

View File

@ -1,5 +1,5 @@
<div align="center">
<img src="images/logo/inventree.png" alt="InvenTree logo" width="200" height="auto" />
<img src="assets/images/logo/inventree.png" alt="InvenTree logo" width="200" height="auto" />
<h1>InvenTree</h1>
<p>Open Source Inventory Management System </p>
@ -11,7 +11,7 @@
[![OpenSSF Best Practices](https://bestpractices.coreinfrastructure.org/projects/7179/badge)](https://bestpractices.coreinfrastructure.org/projects/7179)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/inventree/InvenTree/badge)](https://securityscorecards.dev/viewer/?uri=github.com/inventree/InvenTree)
[![Netlify Status](https://api.netlify.com/api/v1/badges/9bbb2101-0a4d-41e7-ad56-b63fb6053094/deploy-status)](https://app.netlify.com/sites/inventree/deploys)
[![DeepSource](https://app.deepsource.com/gh/inventree/InvenTree.svg/?label=active+issues&show_trend=false&token=trZWqixKLk2t-RXtpSIAslVJ)](https://app.deepsource.com/gh/inventree/InvenTree/)
[![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=inventree_InvenTree&metric=sqale_rating)](https://sonarcloud.io/summary/new_code?id=inventree_InvenTree)
[![Coveralls](https://img.shields.io/coveralls/github/inventree/InvenTree)](https://coveralls.io/github/inventree/InvenTree)
[![Crowdin](https://badges.crowdin.net/inventree/localized.svg)](https://crowdin.com/project/inventree)

View File

Before

Width:  |  Height:  |  Size: 198 KiB

After

Width:  |  Height:  |  Size: 198 KiB

Some files were not shown because too many files have changed in this diff Show More