Mirror of https://github.com/inventree/InvenTree (synced 2024-08-30 18:33:04 +00:00)

Commit ea0aa3d526: Merge branch 'inventree:master' into add-changelog

@@ -7,7 +7,7 @@ services:
 expose:
 - 5432/tcp
 volumes:
-- ../dev:/var/lib/postgresql/data:z
+- inventreedatabase:/var/lib/postgresql/data:z
 environment:
 POSTGRES_DB: inventree
 POSTGRES_USER: inventree_user

@@ -19,7 +19,6 @@ services:
 target: dev
 args:
 base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"
-workspace: "${containerWorkspaceFolder}"
 data_dir: "dev"
 volumes:
 - ../:/home/inventree:z

@@ -32,7 +31,12 @@ services:
 INVENTREE_DB_USER: inventree_user
 INVENTREE_DB_PASSWORD: inventree_password
 INVENTREE_PLUGINS_ENABLED: True
+INVENTREE_SITE_URL: http://localhost:8000
+INVENTREE_CORS_ORIGIN_ALLOW_ALL: True
 INVENTREE_PY_ENV: /home/inventree/dev/venv

 depends_on:
 - db

+volumes:
+inventreedatabase:

@@ -7,9 +7,13 @@ git config --global --add safe.directory /home/inventree
 python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
 . /home/inventree/dev/venv/bin/activate

-# setup InvenTree server
+# Run initial InvenTree server setup
 invoke update -s

+# Configure dev environment
 invoke setup-dev

+# Install required frontend packages
 invoke frontend-install

 # remove existing gitconfig created by "Avoiding Dubious Ownership" step

.github/FUNDING.yml (1 line changed)

@@ -1,4 +1,5 @@
 github: inventree
 ko_fi: inventree
 patreon: inventree
+polar: inventree
 custom: [paypal.me/inventree]

.github/actions/setup/action.yaml (2 lines changed)

@@ -40,7 +40,7 @@ runs:
 # Python installs
 - name: Set up Python ${{ env.python_version }}
 if: ${{ inputs.python == 'true' }}
-uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
+uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # pin@v5.0.0
 with:
 python-version: ${{ env.python_version }}
 cache: pip

.github/dependabot.yml (new file, 36 lines)

@@ -0,0 +1,36 @@
+version: 2
+updates:
+- package-ecosystem: github-actions
+directory: /
+schedule:
+interval: daily
+
+- package-ecosystem: docker
+directory: /
+schedule:
+interval: daily
+
+- package-ecosystem: pip
+directory: /docker
+schedule:
+interval: daily
+
+- package-ecosystem: pip
+directory: /docs
+schedule:
+interval: daily
+
+- package-ecosystem: npm
+directory: /
+schedule:
+interval: daily
+
+- package-ecosystem: pip
+directory: /
+schedule:
+interval: daily
+
+- package-ecosystem: npm
+directory: /src/frontend
+schedule:
+interval: daily

.github/workflows/backport.yml (5 lines changed)

@@ -9,6 +9,9 @@ on:
 pull_request_target:
 types: [ "labeled", "closed" ]

+permissions:
+contents: read
+
 jobs:
 backport:
 name: Backport PR

@@ -22,7 +25,7 @@ jobs:
 )
 steps:
 - name: Backport Action
-uses: sqren/backport-github-action@f54e19901f2a57f8b82360f2490d47ee82ec82c6 # pin@v9.2.2
+uses: sqren/backport-github-action@f7073a2287aefc1fa12685eb25a712ab5620445c # pin@v9.2.2
 with:
 github_token: ${{ secrets.GITHUB_TOKEN }}
 auto_backport_label_prefix: backport-to-

.github/workflows/check_translations.yaml (3 lines changed)

@@ -11,6 +11,9 @@ on:
 env:
 python_version: 3.9

+permissions:
+contents: read
+
 jobs:

 check:

.github/workflows/docker.yaml (65 lines changed)

@@ -24,8 +24,14 @@ on:
 branches:
 - 'master'

+permissions:
+contents: read
+
 jobs:
 paths-filter:
+permissions:
+contents: read # for dorny/paths-filter to fetch a list of changed files
+pull-requests: read # for dorny/paths-filter to read pull requests
 name: Filter
 runs-on: ubuntu-latest

@@ -34,7 +40,7 @@ jobs:

 steps:
 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
-- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1
+- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
 id: filter
 with:
 filters: |

@@ -44,6 +50,7 @@ jobs:
 - docker-compose.yml
 - docker.dev.env
 - Dockerfile
+- InvenTree/settings.py
 - requirements.txt
 - tasks.py

@@ -58,21 +65,13 @@ jobs:
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 python_version: "3.11"
-strategy:
-matrix:
-platform: ["linux/amd64", "linux/arm64"]
-include:
-- platform: linux/amd64
-os: ubuntu-latest
-- platform: linux/arm64
-os: ubuntu-latest # in the future we can try to use alternative runners here
-runs-on: ${{ matrix.os }}
+runs-on: ubuntu-latest # in the future we can try to use alternative runners here

 steps:
 - name: Check out repo
 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
 - name: Set Up Python ${{ env.python_version }}
-uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
+uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
 with:
 python-version: ${{ env.python_version }}
 - name: Version Check

@@ -82,6 +81,14 @@ jobs:
 python3 ci/version_check.py
 echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
 echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
+- name: Test Docker Image
+id: test-docker
+run: |
+docker build . --target production --tag inventree-test
+docker run --rm inventree-test invoke --version
+docker run --rm inventree-test invoke --list
+docker run --rm inventree-test gunicorn --version
+docker run --rm inventree-test pg_dump --version
 - name: Build Docker Image
 # Build the development docker image (using docker-compose.yml)
 run: docker-compose build --no-cache

@@ -117,20 +124,28 @@ jobs:
 uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # pin@v3.0.0
 - name: Set up Docker Buildx
 if: github.event_name != 'pull_request'
-uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # pin@v3.0.0
+uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20 # pin@v3.2.0
 - name: Set up cosign
 if: github.event_name != 'pull_request'
-uses: sigstore/cosign-installer@11086d25041f77fe8fe7b9ea4e48e3b9192b8f19 # pin@v3.1.2
+uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4 # pin@v3.4.0
+- name: Check if Dockerhub login is required
+id: docker_login
+run: |
+if [ -z "${{ secrets.DOCKER_USERNAME }}" ]; then
+echo "skip_dockerhub_login=true" >> $GITHUB_ENV
+else
+echo "skip_dockerhub_login=false" >> $GITHUB_ENV
+fi
 - name: Login to Dockerhub
-if: github.event_name != 'pull_request'
-uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # pin@v3.0.0
+if: github.event_name != 'pull_request' && steps.docker_login.outputs.skip_dockerhub_login != 'true'
+uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # pin@v3.1.0
 with:
 username: ${{ secrets.DOCKER_USERNAME }}
 password: ${{ secrets.DOCKER_PASSWORD }}

 - name: Log into registry ghcr.io
 if: github.event_name != 'pull_request'
-uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # pin@v3.0.0
+uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # pin@v3.1.0
 with:
 registry: ghcr.io
 username: ${{ github.actor }}

@@ -139,19 +154,19 @@ jobs:
 - name: Extract Docker metadata
 if: github.event_name != 'pull_request'
 id: meta
-uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # pin@v5.0.0
+uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # pin@v5.5.1
 with:
 images: |
 inventree/inventree
-ghcr.io/inventree/inventree
+ghcr.io/${{ github.repository }}

-- name: Build and Push
-id: build-and-push
+- name: Push Docker Images
+id: push-docker
 if: github.event_name != 'pull_request'
-uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # pin@v5.0.0
+uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # pin@v5.3.0
 with:
 context: .
-platforms: ${{ matrix.platform }}
+platforms: linux/amd64,linux/arm64
 push: true
 sbom: true
 provenance: false

@@ -160,9 +175,3 @@ jobs:
 build-args: |
 commit_hash=${{ env.git_commit_hash }}
 commit_date=${{ env.git_commit_date }}
-
-- name: Sign the published image
-if: ${{ false }} # github.event_name != 'pull_request'
-env:
-COSIGN_EXPERIMENTAL: "true"
-run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}

.github/workflows/qc_checks.yaml (38 lines changed)

@@ -10,7 +10,7 @@ on:

 env:
 python_version: 3.9
-node_version: 16
+node_version: 18
 # The OS version must be set per job
 server_start_sleep: 60

@@ -20,7 +20,10 @@ env:
 INVENTREE_MEDIA_ROOT: ../test_inventree_media
 INVENTREE_STATIC_ROOT: ../test_inventree_static
 INVENTREE_BACKUP_DIR: ../test_inventree_backup
+INVENTREE_SITE_URL: http://localhost:8000

+permissions:
+contents: read
 jobs:
 paths-filter:
 name: Filter

@@ -34,7 +37,7 @@ jobs:

 steps:
 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
-- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1
+- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
 id: filter
 with:
 filters: |

@@ -81,12 +84,12 @@ jobs:
 steps:
 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
 - name: Set up Python ${{ env.python_version }}
-uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
+uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
 with:
 python-version: ${{ env.python_version }}
 cache: 'pip'
 - name: Run pre-commit Checks
-uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507 # pin@v3.0.0
+uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # pin@v3.0.1
 - name: Check Version
 run: |
 pip install requests

@@ -102,7 +105,7 @@ jobs:
 - name: Checkout Code
 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
 - name: Set up Python ${{ env.python_version }}
-uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
+uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
 with:
 python-version: ${{ env.python_version }}
 - name: Check Config

@@ -111,7 +114,7 @@ jobs:
 pip install -r docs/requirements.txt
 python docs/ci/check_mkdocs_config.py
 - name: Check Links
-uses: gaurav-nelson/github-action-markdown-link-check@v1
+uses: gaurav-nelson/github-action-markdown-link-check@5c5dfc0ac2e225883c0e5f03a85311ec2830d368 # v1
 with:
 folder-path: docs
 config-file: docs/mlc_config.json

@@ -144,9 +147,9 @@ jobs:
 dev-install: true
 update: true
 - name: Export API Documentation
-run: invoke schema --ignore-warnings
+run: invoke schema --ignore-warnings --filename InvenTree/schema.yml
 - name: Upload schema
-uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
+uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
 with:
 name: schema.yml
 path: InvenTree/schema.yml

@@ -160,7 +163,7 @@ jobs:
 echo "URL: $url"
 curl -s -o api.yaml $url
 echo "Downloaded api.yaml"
-- name: Check for differences in schemas
+- name: Check for differences in API Schema
 if: needs.paths-filter.outputs.api == 'false'
 run: |
 diff --color -u InvenTree/schema.yml api.yaml

@@ -181,17 +184,17 @@ jobs:
 name: Push new schema
 runs-on: ubuntu-20.04
 needs: [paths-filter, schema]
-if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
+if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true' && github.repository_owner == 'inventree'
 env:
 version: ${{ needs.schema.outputs.version }}

 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
 with:
 repository: inventree/schema
 token: ${{ secrets.SCHEMA_PAT }}
 - name: Download schema artifact
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
 with:
 name: schema.yml
 - name: Move schema to correct location

@@ -199,7 +202,7 @@ jobs:
 echo "Version: $version"
 mkdir export/${version}
 mv schema.yml export/${version}/api.yaml
-- uses: stefanzweifel/git-auto-commit-action@v5
+- uses: stefanzweifel/git-auto-commit-action@8756aa072ef5b4a080af5dc8fef36c5d586e521d # v5.0.0
 with:
 commit_message: "Update API schema for ${version}"

@@ -216,9 +219,10 @@ jobs:
 INVENTREE_ADMIN_USER: testuser
 INVENTREE_ADMIN_PASSWORD: testpassword
 INVENTREE_ADMIN_EMAIL: test@test.com
-INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
+INVENTREE_PYTHON_TEST_SERVER: http://127.0.0.1:12345
 INVENTREE_PYTHON_TEST_USERNAME: testuser
 INVENTREE_PYTHON_TEST_PASSWORD: testpassword
+INVENTREE_SITE_URL: http://127.0.0.1:12345

 steps:
 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1

@@ -484,7 +488,7 @@ jobs:
 run: cd src/frontend && npx playwright install --with-deps
 - name: Run Playwright tests
 run: cd src/frontend && npx playwright test
-- uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
+- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
 if: always()
 with:
 name: playwright-report

@@ -505,12 +509,12 @@ jobs:
 - name: Install dependencies
 run: cd src/frontend && yarn install
 - name: Build frontend
-run: cd src/frontend && npm run build
+run: cd src/frontend && npm run compile && npm run build
 - name: Zip frontend
 run: |
 cd InvenTree/web/static
 zip -r frontend-build.zip web/
-- uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
+- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # pin@v4.3.1
 with:
 name: frontend-build
 path: InvenTree/web/static/web

.github/workflows/release.yml (7 lines changed)

@@ -5,6 +5,9 @@ on:
 release:
 types: [ published ]

+permissions:
+contents: read
+
 jobs:

 stable:

@@ -37,12 +40,12 @@ jobs:
 - name: Install dependencies
 run: cd src/frontend && yarn install
 - name: Build frontend
-run: cd src/frontend && npm run build
+run: cd src/frontend && npm run compile && npm run build
 - name: Zip frontend
 run: |
 cd InvenTree/web/static/web
 zip -r ../frontend-build.zip *
-- uses: svenstaro/upload-release-action@1beeb572c19a9242f4361f4cee78f8e0d9aec5df # pin@2.7.0
+- uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
 with:
 repo_token: ${{ secrets.GITHUB_TOKEN }}
 file: InvenTree/web/static/frontend-build.zip

.github/workflows/scorecard.yml (new file, 72 lines)

@@ -0,0 +1,72 @@
+# This workflow uses actions that are not certified by GitHub. They are provided
+# by a third-party and are governed by separate terms of service, privacy
+# policy, and support documentation.
+
+name: Scorecard supply-chain security
+on:
+# For Branch-Protection check. Only the default branch is supported. See
+# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+branch_protection_rule:
+# To guarantee Maintained check is occasionally updated. See
+# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+schedule:
+- cron: '32 0 * * 0'
+push:
+branches: [ "master" ]
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+analysis:
+name: Scorecard analysis
+runs-on: ubuntu-latest
+permissions:
+# Needed to upload the results to code-scanning dashboard.
+security-events: write
+# Needed to publish results and get a badge (see publish_results below).
+id-token: write
+# Uncomment the permissions below if installing in a private repository.
+# contents: read
+# actions: read
+
+steps:
+- name: "Checkout code"
+uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
+with:
+persist-credentials: false
+
+- name: "Run analysis"
+uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
+with:
+results_file: results.sarif
+results_format: sarif
+# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
+# - you want to enable the Branch-Protection check on a *public* repository, or
+# - you are installing Scorecard on a *private* repository
+# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
+# repo_token: ${{ secrets.SCORECARD_TOKEN }}
+
+# Public repositories:
+# - Publish results to OpenSSF REST API for easy access by consumers
+# - Allows the repository to include the Scorecard badge.
+# - See https://github.com/ossf/scorecard-action#publishing-results.
+# For private repositories:
+# - `publish_results` will always be set to `false`, regardless
+# of the value entered here.
+publish_results: false
+
+# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
+# format to the repository Actions tab.
+- name: "Upload artifact"
+uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+with:
+name: SARIF file
+path: results.sarif
+retention-days: 5
+
+# Upload the results to GitHub's code scanning dashboard.
+- name: "Upload to code-scanning"
+uses: github/codeql-action/upload-sarif@1b1aada464948af03b950897e5eb522f92603cc2 # v3.24.9
+with:
+sarif_file: results.sarif

.github/workflows/stale.yml (5 lines changed)

@@ -5,6 +5,9 @@ on:
 schedule:
 - cron: '24 11 * * *'

+permissions:
+contents: read
+
 jobs:
 stale:

@@ -14,7 +17,7 @@ jobs:
 pull-requests: write

 steps:
-- uses: actions/stale@1160a2240286f5da8ec72b1c0816ce2481aabf84 # pin@v8.0.0
+- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # pin@v9.0.0
 with:
 repo-token: ${{ secrets.GITHUB_TOKEN }}
 stale-issue-message: 'This issue seems stale. Please react to show this is still important.'

.github/workflows/translations.yml (6 lines changed)

@@ -7,7 +7,10 @@ on:

 env:
 python_version: 3.9
-node_version: 16
+node_version: 18

+permissions:
+contents: write
+
 jobs:
 build:

@@ -22,6 +25,7 @@ jobs:
 INVENTREE_MEDIA_ROOT: ./media
 INVENTREE_STATIC_ROOT: ./static
 INVENTREE_BACKUP_DIR: ./backup
+INVENTREE_SITE_URL: http://localhost:8000

 steps:
 - name: Checkout Code

.gitignore (4 lines changed)

@@ -104,3 +104,7 @@ api.yaml

 # web frontend (static files)
 InvenTree/web/static
+
+# Generated docs files
+docs/docs/api/*.yml
+docs/docs/api/schema/*.yml

|
@ -19,9 +19,9 @@ before:
|
|||||||
- contrib/packager.io/before.sh
|
- contrib/packager.io/before.sh
|
||||||
dependencies:
|
dependencies:
|
||||||
- curl
|
- curl
|
||||||
- python3.9
|
- "python3.9 | python3.10 | python3.11"
|
||||||
- python3.9-venv
|
- "python3.9-venv | python3.10-venv | python3.11-venv"
|
||||||
- python3.9-dev
|
- "python3.9-dev | python3.10-dev | python3.11-dev"
|
||||||
- python3-pip
|
- python3-pip
|
||||||
- python3-cffi
|
- python3-cffi
|
||||||
- python3-brotli
|
- python3-brotli
|
||||||
@ -36,4 +36,3 @@ dependencies:
|
|||||||
targets:
|
targets:
|
||||||
ubuntu-20.04: true
|
ubuntu-20.04: true
|
||||||
debian-11: true
|
debian-11: true
|
||||||
debian-12: true
|
|
||||||
|
@@ -5,7 +5,9 @@ exclude: |
 InvenTree/InvenTree/static/.*|
 InvenTree/locale/.*|
 src/frontend/src/locales/.*|
-.*/migrations/.*
+.*/migrations/.* |
+src/frontend/yarn.lock |
+yarn.lock
 )$
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks

@@ -16,7 +18,7 @@ repos:
 - id: check-yaml
 - id: mixed-line-ending
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.2.2
+rev: v0.3.4
 hooks:
 - id: ruff-format
 args: [--preview]

@@ -25,16 +27,16 @@ repos:
 --fix,
 --preview
 ]
-- repo: https://github.com/matmair/ruff-pre-commit
-rev: 830893bf46db844d9c99b6c468e285199adf2de6 # uv-018
+- repo: https://github.com/astral-sh/uv-pre-commit
+rev: v0.1.24
 hooks:
 - id: pip-compile
 name: pip-compile requirements-dev.in
-args: [requirements-dev.in, -o, requirements-dev.txt, --python-version=3.9]
+args: [requirements-dev.in, -o, requirements-dev.txt, --python-version=3.9, --no-strip-extras]
 files: ^requirements-dev\.(in|txt)$
 - id: pip-compile
 name: pip-compile requirements.txt
-args: [requirements.in, -o, requirements.txt,--python-version=3.9]
+args: [requirements.in, -o, requirements.txt,--python-version=3.9, --no-strip-extras]
 files: ^requirements\.(in|txt)$
 - repo: https://github.com/Riverside-Healthcare/djLint
 rev: v1.34.1

@@ -60,7 +62,7 @@ repos:
 - "prettier@^2.4.1"
 - "@trivago/prettier-plugin-sort-imports"
 - repo: https://github.com/pre-commit/mirrors-eslint
-rev: "v9.0.0-beta.0"
+rev: "v9.0.0-rc.0"
 hooks:
 - id: eslint
 additional_dependencies:

@@ -71,3 +73,11 @@ repos:
 - "@typescript-eslint/eslint-plugin@latest"
 - "@typescript-eslint/parser"
 files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
+- repo: https://github.com/gitleaks/gitleaks
+rev: v8.18.2
+hooks:
+- id: gitleaks
+#- repo: https://github.com/jumanjihouse/pre-commit-hooks
+# rev: 3.0.0
+# hooks:
+# - id: shellcheck

Dockerfile (12 lines changed)

@@ -23,7 +23,6 @@ ENV PYTHONUNBUFFERED 1
 ENV PIP_DISABLE_PIP_VERSION_CHECK 1
 ENV INVOKE_RUN_SHELL="/bin/ash"

-ENV INVENTREE_LOG_LEVEL="WARNING"
 ENV INVENTREE_DOCKER="true"

 # InvenTree paths

@@ -48,8 +47,6 @@ ENV INVENTREE_BACKGROUND_WORKERS="4"
 ENV INVENTREE_WEB_ADDR=0.0.0.0
 ENV INVENTREE_WEB_PORT=8000

-ENV VIRTUAL_ENV=/usr/local
-
 LABEL org.label-schema.schema-version="1.0" \
 org.label-schema.build-date=${DATE} \
 org.label-schema.vendor="inventree" \

@@ -65,8 +62,11 @@ RUN apk add --no-cache \
 libjpeg libwebp zlib \
 # Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
 py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
-# Core database packages
-postgresql13-client && \
+# Postgres client
+postgresql13-client \
+# MySQL / MariaDB client
+mariadb-client mariadb-connector-c \
+&& \
 # fonts
 apk --update --upgrade --no-cache add fontconfig ttf-freefont font-noto terminus-font && fc-cache -f

@@ -96,7 +96,7 @@ FROM inventree_base as prebuild

 ENV PATH=/root/.local/bin:$PATH
 RUN ./install_build_packages.sh --no-cache --virtual .build-deps && \
-pip install --user uv --no-cache-dir && pip install -r base_requirements.txt -r requirements.txt --no-cache && \
+pip install --user -r base_requirements.txt -r requirements.txt --no-cache && \
 apk --purge del .build-deps

 # Frontend builder image:

@@ -91,7 +91,7 @@ class VersionView(APIView):
 })


-class VersionSerializer(serializers.Serializer):
+class VersionInformationSerializer(serializers.Serializer):
 """Serializer for a single version."""

 version = serializers.CharField()

@@ -101,21 +101,21 @@ class VersionSerializer(serializers.Serializer):
 latest = serializers.BooleanField()

 class Meta:
-"""Meta class for VersionSerializer."""
+"""Meta class for VersionInformationSerializer."""

-fields = ['version', 'date', 'gh', 'text', 'latest']
+fields = '__all__'


 class VersionApiSerializer(serializers.Serializer):
 """Serializer for the version api endpoint."""

-VersionSerializer(many=True)
+VersionInformationSerializer(many=True)


 class VersionTextView(ListAPI):
 """Simple JSON endpoint for InvenTree version text."""

-serializer_class = VersionSerializer
+serializer_class = VersionInformationSerializer

 permission_classes = [permissions.IsAdminUser]

@@ -1,11 +1,41 @@
 """InvenTree API version information."""

 # InvenTree API version
-INVENTREE_API_VERSION = 178
+INVENTREE_API_VERSION = 185
 """Increment this API version number whenever there is a significant change to the API that any clients need to know about."""

 INVENTREE_API_TEXT = """

+v185 - 2024-03-24 : https://github.com/inventree/InvenTree/pull/6836
+- Remove /plugin/activate endpoint
+- Update docstrings and typing for various API endpoints (no functional changes)
+
+v184 - 2024-03-17 : https://github.com/inventree/InvenTree/pull/10464
+- Add additional fields for tests (start/end datetime, test station)
+
+v183 - 2024-03-14 : https://github.com/inventree/InvenTree/pull/5972
+- Adds "category_default_location" annotated field to part serializer
+- Adds "part_detail.category_default_location" annotated field to stock item serializer
+- Adds "part_detail.category_default_location" annotated field to purchase order line serializer
+- Adds "parent_default_location" annotated field to category serializer
+
+v182 - 2024-03-13 : https://github.com/inventree/InvenTree/pull/6714
+- Expose ReportSnippet model to the /report/snippet/ API endpoint
+- Expose ReportAsset model to the /report/asset/ API endpoint
+
+v181 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6541
+- Adds "width" and "height" fields to the LabelTemplate API endpoint
+- Adds "page_size" and "landscape" fields to the ReportTemplate API endpoint
+
+v180 - 2024-3-02 : https://github.com/inventree/InvenTree/pull/6463
+- Tweaks to API documentation to allow automatic documentation generation
+
+v179 - 2024-03-01 : https://github.com/inventree/InvenTree/pull/6605
+- Adds "subcategories" count to PartCategory serializer
+- Adds "sublocations" count to StockLocation serializer
+- Adds "image" field to PartBrief serializer
+- Adds "image" field to CompanyBrief serializer
+
 v178 - 2024-02-29 : https://github.com/inventree/InvenTree/pull/6604
 - Adds "external_stock" field to the Part API endpoint
 - Adds "external_stock" field to the BomItem API endpoint

@@ -39,9 +39,9 @@ def reload_unit_registry():
 reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)

 # Aliases for temperature units
-reg.define('@alias degC = celsius = Celsius')
-reg.define('@alias degF = fahrenheit = Fahrenheit')
-reg.define('@alias degK = kelvin = Kelvin')
+reg.define('@alias degC = Celsius')
+reg.define('@alias degF = Fahrenheit')
+reg.define('@alias degK = Kelvin')

 # Define some "standard" additional units
 reg.define('piece = 1')

@@ -165,6 +165,13 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
 value = str(value).strip() if value else ''
 unit = str(unit).strip() if unit else ''

+# Handle imperial length measurements
+if value.count("'") == 1 and value.endswith("'"):
+value = value.replace("'", ' feet')
+
+if value.count('"') == 1 and value.endswith('"'):
+value = value.replace('"', ' inches')
+
 # Error on blank values
 if not value:
 raise ValidationError(_('No value provided'))

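Note: a minimal standalone sketch (not the InvenTree implementation itself) of the imperial-length pre-processing added to convert_physical_value() above. A single trailing ' or " is rewritten to an explicit unit name before the value is handed to the unit registry.

# Sketch only: mirrors the string rewriting shown in the hunk above.
def normalise_imperial(value: str) -> str:
    value = value.strip()
    if value.count("'") == 1 and value.endswith("'"):
        value = value.replace("'", ' feet')
    if value.count('"') == 1 and value.endswith('"'):
        value = value.replace('"', ' inches')
    return value

print(normalise_imperial("3'"))      # -> "3 feet"
print(normalise_imperial('2.5"'))    # -> "2.5 inches"
print(normalise_imperial('100 mm'))  # unchanged -> "100 mm"
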
InvenTree/InvenTree/files.py (new file, 8 lines)

@@ -0,0 +1,8 @@
+"""Helpers for file handling in InvenTree."""
+
+from pathlib import Path
+
+from django.conf import settings
+
+TEMPLATES_DIR = Path(__file__).parent.parent
+MEDIA_STORAGE_DIR = settings.MEDIA_ROOT

@@ -1,5 +1,6 @@
 """Provides helper functions used throughout the InvenTree project."""

+import datetime
 import hashlib
 import io
 import json

@@ -8,16 +9,19 @@ import os
 import os.path
 import re
 from decimal import Decimal, InvalidOperation
-from typing import TypeVar
+from pathlib import Path
+from typing import TypeVar, Union
 from wsgiref.util import FileWrapper

+import django.utils.timezone as timezone
 from django.conf import settings
 from django.contrib.staticfiles.storage import StaticFilesStorage
 from django.core.exceptions import FieldError, ValidationError
-from django.core.files.storage import default_storage
+from django.core.files.storage import Storage, default_storage
 from django.http import StreamingHttpResponse
 from django.utils.translation import gettext_lazy as _

+import pytz
 import regex
 from bleach import clean
 from djmoney.money import Money

@@ -87,11 +91,24 @@ def generateTestKey(test_name: str) -> str:
 key = test_name.strip().lower()
 key = key.replace(' ', '')

-# Remove any characters that cannot be used to represent a variable
-key = re.sub(r'[^a-zA-Z0-9_]', '', key)
+def valid_char(char: str):
+"""Determine if a particular character is valid for use in a test key."""
+if not char.isprintable():
+return False

-# If the key starts with a digit, prefix with an underscore
-if key[0].isdigit():
+if char.isidentifier():
+return True
+
+if char.isalnum():
+return True
+
+return False
+
+# Remove any characters that cannot be used to represent a variable
+key = ''.join([c for c in key if valid_char(c)])
+
+# If the key starts with a non-identifier character, prefix with an underscore
+if len(key) > 0 and not key[0].isidentifier():
 key = '_' + key

 return key

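Note: a standalone sketch (not the InvenTree implementation) of the revised test-key sanitisation above. Instead of the old [^a-zA-Z0-9_] regex, each character is kept if it is printable and either an identifier character or alphanumeric, and a leading underscore is added when the key does not start with a valid identifier character.

# Sketch only: condensed version of the valid_char() logic in the hunk above.
def generate_test_key(test_name: str) -> str:
    key = test_name.strip().lower().replace(' ', '')

    def valid_char(char: str) -> bool:
        return char.isprintable() and (char.isidentifier() or char.isalnum())

    key = ''.join(c for c in key if valid_char(c))
    if key and not key[0].isidentifier():
        key = '_' + key
    return key

print(generate_test_key('Test Name 1'))  # -> 'testname1'
print(generate_test_key('100 Degrees'))  # -> '_100degrees'
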
@@ -845,9 +862,87 @@ def hash_barcode(barcode_data):
 return str(hash.hexdigest())


-def hash_file(filename: str):
+def hash_file(filename: Union[str, Path], storage: Union[Storage, None] = None):
 """Return the MD5 hash of a file."""
-return hashlib.md5(open(filename, 'rb').read()).hexdigest()
+content = (
+open(filename, 'rb').read()
+if storage is None
+else storage.open(str(filename), 'rb').read()
+)
+return hashlib.md5(content).hexdigest()
+
+
+def current_time(local=True):
+"""Return the current date and time as a datetime object.
+
+- If timezone support is active, returns a timezone aware time
+- If timezone support is not active, returns a timezone naive time
+
+Arguments:
+local: Return the time in the local timezone, otherwise UTC (default = True)
+
+"""
+if settings.USE_TZ:
+now = timezone.now()
+now = to_local_time(now, target_tz=server_timezone() if local else 'UTC')
+return now
+else:
+return datetime.datetime.now()
+
+
+def current_date(local=True):
+"""Return the current date."""
+return current_time(local=local).date()
+
+
+def server_timezone() -> str:
+"""Return the timezone of the server as a string.
+
+e.g. "UTC" / "Australia/Sydney" etc
+"""
+return settings.TIME_ZONE
+
+
+def to_local_time(time, target_tz: str = None):
+"""Convert the provided time object to the local timezone.
+
+Arguments:
+time: The time / date to convert
+target_tz: The desired timezone (string) - defaults to server time
+
+Returns:
+A timezone aware datetime object, with the desired timezone
+
+Raises:
+TypeError: If the provided time object is not a datetime or date object
+"""
+if isinstance(time, datetime.datetime):
+pass
+elif isinstance(time, datetime.date):
+time = timezone.datetime(year=time.year, month=time.month, day=time.day)
+else:
+raise TypeError(
+f'Argument must be a datetime or date object (found {type(time)}'
+)
+
+# Extract timezone information from the provided time
+source_tz = getattr(time, 'tzinfo', None)
+
+if not source_tz:
+# Default to UTC if not provided
+source_tz = pytz.utc
+
+if not target_tz:
+target_tz = server_timezone()
+
+try:
+target_tz = pytz.timezone(str(target_tz))
+except pytz.UnknownTimeZoneError:
+target_tz = pytz.utc
+
+target_time = time.replace(tzinfo=source_tz).astimezone(target_tz)
+
+return target_time
+
+
 def get_objectreference(

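Note: a standalone sketch (not InvenTree code, no Django settings required) of the timezone-conversion pattern used by the new to_local_time() helper above: attach a source timezone when the value is naive, then convert to the requested target timezone via pytz.

# Sketch only: illustrates the pytz conversion pattern from the hunk above.
import datetime

import pytz

def to_local_time_sketch(value: datetime.datetime, target_tz: str = 'UTC') -> datetime.datetime:
    source_tz = value.tzinfo or pytz.utc  # naive values are assumed to be UTC here
    try:
        target = pytz.timezone(str(target_tz))
    except pytz.UnknownTimeZoneError:
        target = pytz.utc  # fall back to UTC on an unknown timezone name
    return value.replace(tzinfo=source_tz).astimezone(target)

naive = datetime.datetime(2024, 3, 24, 12, 0, 0)
print(to_local_time_sketch(naive, 'Australia/Sydney'))  # 2024-03-24 23:00:00+11:00
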
@@ -2,12 +2,14 @@

 If a new language translation is supported, it must be added here
 After adding a new language, run the following command:

 python manage.py makemessages -l <language_code> -e html,js,py --no-wrap
-where <language_code> is the code for the new language
+- where <language_code> is the code for the new language

 Additionally, update the following files with the new locale code:

 - /src/frontend/.linguirc file
-- /src/frontend/src/context/LanguageContext.tsx
+- /src/frontend/src/contexts/LanguageContext.tsx
 """

 from django.utils.translation import gettext_lazy as _

@@ -30,6 +32,7 @@ LOCALES = [
 ('it', _('Italian')),
 ('ja', _('Japanese')),
 ('ko', _('Korean')),
+('lv', _('Latvian')),
 ('nl', _('Dutch')),
 ('no', _('Norwegian')),
 ('pl', _('Polish')),

@@ -74,6 +74,7 @@ class AuthRequiredMiddleware(object):

 # Is the function exempt from auth requirements?
 path_func = resolve(request.path).func
+
 if getattr(path_func, 'auth_exempt', False) is True:
 return self.get_response(request)

@@ -119,7 +120,13 @@ class AuthRequiredMiddleware(object):
 ]

 # Do not redirect requests to any of these paths
-paths_ignore = ['/api/', '/js/', '/media/', '/static/']
+paths_ignore = [
+'/api/',
+'/auth/',
+'/js/',
+settings.MEDIA_URL,
+settings.STATIC_URL,
+]

 if path not in urls and not any(
 path.startswith(p) for p in paths_ignore

@@ -352,7 +352,12 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
 try:
 instance.full_clean()
 except (ValidationError, DjangoValidationError) as exc:
+if hasattr(exc, 'message_dict'):
 data = exc.message_dict
+elif hasattr(exc, 'message'):
+data = {'non_field_errors': [str(exc.message)]}
+else:
+data = {'non_field_errors': [str(exc)]}
+
 # Change '__all__' key (django style) to 'non_field_errors' (DRF style)
 if '__all__' in data:

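Note: a standalone sketch (not the InvenTree serializer itself) of the error-normalisation pattern in the hunk above. Whatever shape the validation error takes, the result is coerced into a DRF-style mapping; the '__all__' handling shown here is an assumption about the surrounding code, which is not part of the hunk.

# Sketch only: works with any exception object, no Django required.
def normalise_validation_error(exc: Exception) -> dict:
    if hasattr(exc, 'message_dict'):
        data = dict(exc.message_dict)  # field name -> list of messages
    elif hasattr(exc, 'message'):
        data = {'non_field_errors': [str(exc.message)]}
    else:
        data = {'non_field_errors': [str(exc)]}

    # Django uses '__all__' for non-field errors; DRF expects 'non_field_errors'
    if '__all__' in data:
        data['non_field_errors'] = data.pop('__all__')
    return data

print(normalise_validation_error(ValueError('bad value')))  # {'non_field_errors': ['bad value']}
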
@ -22,9 +22,11 @@ from django.http import Http404
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
import moneyed
|
import moneyed
|
||||||
|
import pytz
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
from InvenTree.config import get_boolean_setting, get_custom_file, get_setting
|
from InvenTree.config import get_boolean_setting, get_custom_file, get_setting
|
||||||
|
from InvenTree.ready import isInMainThread
|
||||||
from InvenTree.sentry import default_sentry_dsn, init_sentry
|
from InvenTree.sentry import default_sentry_dsn, init_sentry
|
||||||
from InvenTree.version import checkMinPythonVersion, inventreeApiVersion
|
from InvenTree.version import checkMinPythonVersion, inventreeApiVersion
|
||||||
|
|
||||||
@ -130,6 +132,9 @@ DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000
|
|||||||
# Web URL endpoint for served static files
|
# Web URL endpoint for served static files
|
||||||
STATIC_URL = '/static/'
|
STATIC_URL = '/static/'
|
||||||
|
|
||||||
|
# Web URL endpoint for served media files
|
||||||
|
MEDIA_URL = '/media/'
|
||||||
|
|
||||||
STATICFILES_DIRS = []
|
STATICFILES_DIRS = []
|
||||||
|
|
||||||
# Translated Template settings
|
# Translated Template settings
|
||||||
@@ -155,9 +160,6 @@ STATFILES_I18_PROCESSORS = ['InvenTree.context.status_codes']
 # Color Themes Directory
 STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve()

-# Web URL endpoint for served media files
-MEDIA_URL = '/media/'
-
 # Database backup options
 # Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html
 DBBACKUP_SEND_EMAIL = False
@@ -205,6 +207,7 @@ INSTALLED_APPS = [
     'django.contrib.auth',
     'django.contrib.contenttypes',
     'user_sessions', # db user sessions
+    'whitenoise.runserver_nostatic',
     'django.contrib.messages',
     'django.contrib.staticfiles',
     'django.contrib.sites',
@@ -249,6 +252,7 @@ MIDDLEWARE = CONFIG.get(
         'django.middleware.locale.LocaleMiddleware',
         'django.middleware.csrf.CsrfViewMiddleware',
         'corsheaders.middleware.CorsMiddleware',
+        'whitenoise.middleware.WhiteNoiseMiddleware',
         'django.middleware.common.CommonMiddleware',
         'django.contrib.auth.middleware.AuthenticationMiddleware',
         'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth
@@ -294,7 +298,10 @@ if LDAP_AUTH:

     # get global options from dict and use ldap.OPT_* as keys and values
     global_options_dict = get_setting(
-        'INVENTREE_LDAP_GLOBAL_OPTIONS', 'ldap.global_options', {}, dict
+        'INVENTREE_LDAP_GLOBAL_OPTIONS',
+        'ldap.global_options',
+        default_value=None,
+        typecast=dict,
     )
     global_options = {}
     for k, v in global_options_dict.items():
@@ -364,24 +371,16 @@ if LDAP_AUTH:
     )
     AUTH_LDAP_DENY_GROUP = get_setting('INVENTREE_LDAP_DENY_GROUP', 'ldap.deny_group')
     AUTH_LDAP_USER_FLAGS_BY_GROUP = get_setting(
-        'INVENTREE_LDAP_USER_FLAGS_BY_GROUP', 'ldap.user_flags_by_group', {}, dict
+        'INVENTREE_LDAP_USER_FLAGS_BY_GROUP',
+        'ldap.user_flags_by_group',
+        default_value=None,
+        typecast=dict,
     )
     AUTH_LDAP_FIND_GROUP_PERMS = True

-# Internal IP addresses allowed to see the debug toolbar
-INTERNAL_IPS = ['127.0.0.1']
-
 # Internal flag to determine if we are running in docker mode
 DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)

-if DOCKER: # pragma: no cover
-    # Internal IP addresses are different when running under docker
-    hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname())
-    INTERNAL_IPS = [ip[: ip.rfind('.')] + '.1' for ip in ips] + [
-        '127.0.0.1',
-        '10.0.2.2',
-    ]
-
 # Allow secure http developer server in debug mode
 if DEBUG:
     INSTALLED_APPS.append('sslserver')
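The LDAP hunks above move several get_setting() calls from positional arguments to explicit default_value/typecast keywords. A minimal sketch of the resulting call pattern, assuming get_setting keeps the (environment variable, config key, ...) signature visible in the surrounding code; the variable name and fallback handling here are illustrative only:

    # Illustrative only - mirrors the keyword style adopted above
    ldap_flags = get_setting(
        'INVENTREE_LDAP_USER_FLAGS_BY_GROUP',  # environment variable name
        'ldap.user_flags_by_group',            # key in config.yaml
        default_value=None,                    # returned when the setting is missing
        typecast=dict,                         # cast the raw value to a dict
    )
    if ldap_flags is None:
        ldap_flags = {}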
@@ -469,21 +468,6 @@ if USE_JWT:
     INSTALLED_APPS.append('rest_framework_simplejwt')

 # WSGI default setting
-SPECTACULAR_SETTINGS = {
-    'TITLE': 'InvenTree API',
-    'DESCRIPTION': 'API for InvenTree - the intuitive open source inventory management system',
-    'LICENSE': {
-        'name': 'MIT',
-        'url': 'https://github.com/inventree/InvenTree/blob/master/LICENSE',
-    },
-    'EXTERNAL_DOCS': {
-        'description': 'More information about InvenTree in the official docs',
-        'url': 'https://docs.inventree.org',
-    },
-    'VERSION': str(inventreeApiVersion()),
-    'SERVE_INCLUDE_SCHEMA': False,
-}
-
 WSGI_APPLICATION = 'InvenTree.wsgi.application'

 """
@@ -497,7 +481,7 @@ Configure the database backend based on the user-specified values.
 logger.debug('Configuring database backend:')

 # Extract database configuration from the config.yaml file
-db_config = CONFIG.get('database', {})
+db_config = CONFIG.get('database', None)

 if not db_config:
     db_config = {}
@@ -573,7 +557,10 @@ Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
 # connecting to the database server (such as a replica failover) don't sit and
 # wait for possibly an hour or more, just tell the client something went wrong
 # and let the client retry when they want to.
-db_options = db_config.get('OPTIONS', db_config.get('options', {}))
+db_options = db_config.get('OPTIONS', db_config.get('options', None))

+if db_options is None:
+    db_options = {}
+
 # Specific options for postgres backend
 if 'postgres' in db_engine: # pragma: no cover
@@ -736,7 +723,10 @@ if TRACING_ENABLED: # pragma: no cover
     logger.info('OpenTelemetry tracing enabled')

     _t_resources = get_setting(
-        'INVENTREE_TRACING_RESOURCES', 'tracing.resources', {}, dict
+        'INVENTREE_TRACING_RESOURCES',
+        'tracing.resources',
+        default_value=None,
+        typecast=dict,
     )
     cstm_tags = {'inventree.env.' + k: v for k, v in inventree_tags.items()}
     tracing_resources = {**cstm_tags, **_t_resources}
@@ -748,7 +738,12 @@ if TRACING_ENABLED: # pragma: no cover
         console=get_boolean_setting(
             'INVENTREE_TRACING_CONSOLE', 'tracing.console', False
         ),
-        auth=get_setting('INVENTREE_TRACING_AUTH', 'tracing.auth', {}),
+        auth=get_setting(
+            'INVENTREE_TRACING_AUTH',
+            'tracing.auth',
+            default_value=None,
+            typecast=dict,
+        ),
         is_http=get_setting('INVENTREE_TRACING_IS_HTTP', 'tracing.is_http', True),
         append_http=get_boolean_setting(
             'INVENTREE_TRACING_APPEND_HTTP', 'tracing.append_http', True
@@ -945,13 +940,20 @@ LOCALE_PATHS = (BASE_DIR.joinpath('locale/'),)

 TIME_ZONE = get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC')

-USE_I18N = True
+# Check that the timezone is valid
+try:
+    pytz.timezone(TIME_ZONE)
+except pytz.exceptions.UnknownTimeZoneError: # pragma: no cover
+    raise ValueError(f"Specified timezone '{TIME_ZONE}' is not valid")
+
+USE_I18N = True

 # Do not use native timezone support in "test" mode
 # It generates a *lot* of cruft in the logs
 if not TESTING:
     USE_TZ = True # pragma: no cover
+else:
+    USE_TZ = False

 DATE_INPUT_FORMATS = ['%Y-%m-%d']

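The timezone check added above makes the server fail fast when an invalid INVENTREE_TIMEZONE value is configured. A standalone sketch of the same pattern, assuming pytz is importable in the target environment (the example value is hypothetical):

    import pytz

    TIME_ZONE = 'Australia/Sydney'  # hypothetical configured value

    try:
        pytz.timezone(TIME_ZONE)    # raises if the name is not a known zone
    except pytz.exceptions.UnknownTimeZoneError:
        raise ValueError(f"Specified timezone '{TIME_ZONE}' is not valid")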
@@ -990,13 +992,33 @@ if not SITE_MULTI:
 ALLOWED_HOSTS = get_setting(
     'INVENTREE_ALLOWED_HOSTS',
     config_key='allowed_hosts',
-    default_value=['*'],
+    default_value=[],
     typecast=list,
 )

 if SITE_URL and SITE_URL not in ALLOWED_HOSTS:
     ALLOWED_HOSTS.append(SITE_URL)

+if not ALLOWED_HOSTS:
+    if DEBUG:
+        logger.info(
+            'No ALLOWED_HOSTS specified. Defaulting to ["*"] for debug mode. This is not recommended for production use'
+        )
+        ALLOWED_HOSTS = ['*']
+    elif not TESTING:
+        logger.error(
+            'No ALLOWED_HOSTS specified. Please provide a list of allowed hosts, or specify INVENTREE_SITE_URL'
+        )
+
+        # Server cannot run without ALLOWED_HOSTS
+        if isInMainThread():
+            sys.exit(-1)
+
+# Ensure that the ALLOWED_HOSTS do not contain any scheme info
+for i, host in enumerate(ALLOWED_HOSTS):
+    if '://' in host:
+        ALLOWED_HOSTS[i] = host.split('://')[1]
+
 # List of trusted origins for unsafe requests
 # Ref: https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
 CSRF_TRUSTED_ORIGINS = get_setting(
@@ -1010,6 +1032,20 @@ CSRF_TRUSTED_ORIGINS = get_setting(
 if SITE_URL and SITE_URL not in CSRF_TRUSTED_ORIGINS:
     CSRF_TRUSTED_ORIGINS.append(SITE_URL)

+if not TESTING and len(CSRF_TRUSTED_ORIGINS) == 0:
+    if DEBUG:
+        logger.warning(
+            'No CSRF_TRUSTED_ORIGINS specified. Defaulting to http://* for debug mode. This is not recommended for production use'
+        )
+        CSRF_TRUSTED_ORIGINS = ['http://*']
+
+    elif isInMainThread():
+        # Server thread cannot run without CSRF_TRUSTED_ORIGINS
+        logger.error(
+            'No CSRF_TRUSTED_ORIGINS specified. Please provide a list of trusted origins, or specify INVENTREE_SITE_URL'
+        )
+        sys.exit(-1)
+
 USE_X_FORWARDED_HOST = get_boolean_setting(
     'INVENTREE_USE_X_FORWARDED_HOST',
     config_key='use_x_forwarded_host',
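The block above now rejects an empty ALLOWED_HOSTS outside of debug mode and strips any scheme prefix from configured hosts. A small illustration of the stripping step, using hypothetical input values:

    # Hypothetical input values
    allowed_hosts = ['https://inventree.example.com', 'localhost']

    for i, host in enumerate(allowed_hosts):
        if '://' in host:
            # keep only the host portion, e.g. 'inventree.example.com'
            allowed_hosts[i] = host.split('://')[1]

    print(allowed_hosts)  # ['inventree.example.com', 'localhost']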
@@ -1037,8 +1073,8 @@ CORS_ALLOW_CREDENTIALS = get_boolean_setting(
     default_value=True,
 )

-# Only allow CORS access to API and media endpoints
-CORS_URLS_REGEX = r'^/(api|media|static)/.*$'
+# Only allow CORS access to the following URL endpoints
+CORS_URLS_REGEX = r'^/(api|auth|media|static)/.*$'

 CORS_ALLOWED_ORIGINS = get_setting(
     'INVENTREE_CORS_ORIGIN_WHITELIST',
@@ -1051,6 +1087,27 @@ CORS_ALLOWED_ORIGINS = get_setting(
 if SITE_URL and SITE_URL not in CORS_ALLOWED_ORIGINS:
     CORS_ALLOWED_ORIGINS.append(SITE_URL)

+CORS_ALLOWED_ORIGIN_REGEXES = get_setting(
+    'INVENTREE_CORS_ORIGIN_REGEX',
+    config_key='cors.regex',
+    default_value=[],
+    typecast=list,
+)
+
+# In debug mode allow CORS requests from localhost
+# This allows connection from the frontend development server
+if DEBUG:
+    CORS_ALLOWED_ORIGIN_REGEXES.append(r'^http://localhost:\d+$')
+
+if CORS_ALLOW_ALL_ORIGINS:
+    logger.info('CORS: All origins allowed')
+else:
+    if CORS_ALLOWED_ORIGINS:
+        logger.info('CORS: Whitelisted origins: %s', CORS_ALLOWED_ORIGINS)
+
+    if CORS_ALLOWED_ORIGIN_REGEXES:
+        logger.info('CORS: Whitelisted origin regexes: %s', CORS_ALLOWED_ORIGIN_REGEXES)
+
 for app in SOCIAL_BACKENDS:
     # Ensure that the app starts with 'allauth.socialaccount.providers'
     social_prefix = 'allauth.socialaccount.providers.'
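The debug-mode entry added to CORS_ALLOWED_ORIGIN_REGEXES matches any localhost port, which is what a local frontend development server would use. A quick check of the pattern behaviour (port numbers are examples only):

    import re

    pattern = r'^http://localhost:\d+$'

    assert re.match(pattern, 'http://localhost:5173')       # example dev-server port
    assert re.match(pattern, 'http://localhost:8000')
    assert not re.match(pattern, 'https://localhost:8000')  # different scheme is rejected
    assert not re.match(pattern, 'http://example.com')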
@@ -1075,14 +1132,35 @@ SOCIALACCOUNT_OPENID_CONNECT_URL_PREFIX = ''
 ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting(
     'INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int
 )
-ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting(
-    'INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int
-)
+# allauth rate limiting: https://docs.allauth.org/en/latest/account/rate_limits.html
+# The default login rate limit is "5/m/user,5/m/ip,5/m/key"
+login_attempts = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5)
+
+try:
+    login_attempts = int(login_attempts)
+    login_attempts = f'{login_attempts}/m/ip,{login_attempts}/m/key'
+except ValueError:
+    pass
+
+ACCOUNT_RATE_LIMITS = {'login_failed': login_attempts}
+
+# Default protocol for login
 ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting(
-    'INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http'
+    'INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', None
 )

+if ACCOUNT_DEFAULT_HTTP_PROTOCOL is None:
+    if SITE_URL and SITE_URL.startswith('https://'):
+        # auto-detect HTTPS prtoocol
+        ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
+    else:
+        # default to http
+        ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'http'
+
 ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True
 ACCOUNT_PREVENT_ENUMERATION = True
+ACCOUNT_EMAIL_SUBJECT_PREFIX = EMAIL_SUBJECT_PREFIX
 # 2FA
 REMOVE_SUCCESS_URL = 'settings'

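The login-attempt setting is now translated into an allauth rate-limit string rather than the removed ACCOUNT_LOGIN_ATTEMPTS_LIMIT. Run in isolation, the conversion above produces values like the following (the starting value is an example):

    login_attempts = 5                  # value from INVENTREE_LOGIN_ATTEMPTS (example)
    try:
        login_attempts = int(login_attempts)
        login_attempts = f'{login_attempts}/m/ip,{login_attempts}/m/key'
    except ValueError:
        pass                            # a preformatted string is passed through as-is

    print(login_attempts)               # '5/m/ip,5/m/key'
    ACCOUNT_RATE_LIMITS = {'login_failed': login_attempts}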
@@ -1165,6 +1243,9 @@ PLUGIN_RETRY = get_setting(
 ) # How often should plugin loading be tried?
 PLUGIN_FILE_CHECKED = False # Was the plugin file checked?

+# Flag to allow table events during testing
+TESTING_TABLE_EVENTS = False
+
 # User interface customization values
 CUSTOM_LOGO = get_custom_file(
     'INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True
@@ -1173,7 +1254,9 @@ CUSTOM_SPLASH = get_custom_file(
     'INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash'
 )

-CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {})
+CUSTOMIZE = get_setting(
+    'INVENTREE_CUSTOMIZE', 'customize', default_value=None, typecast=dict
+)

 # Load settings for the frontend interface
 FRONTEND_SETTINGS = config.get_frontend_settings(debug=DEBUG)
@@ -1208,3 +1291,23 @@ if CUSTOM_FLAGS:
 # Magic login django-sesame
 SESAME_MAX_AGE = 300
 LOGIN_REDIRECT_URL = '/api/auth/login-redirect/'
+
+# Configuratino for API schema generation
+SPECTACULAR_SETTINGS = {
+    'TITLE': 'InvenTree API',
+    'DESCRIPTION': 'API for InvenTree - the intuitive open source inventory management system',
+    'LICENSE': {
+        'name': 'MIT',
+        'url': 'https://github.com/inventree/InvenTree/blob/master/LICENSE',
+    },
+    'EXTERNAL_DOCS': {
+        'description': 'More information about InvenTree in the official docs',
+        'url': 'https://docs.inventree.org',
+    },
+    'VERSION': str(inventreeApiVersion()),
+    'SERVE_INCLUDE_SCHEMA': False,
+    'SCHEMA_PATH_PREFIX': '/api/',
+}
+
+if SITE_URL and not TESTING:
+    SPECTACULAR_SETTINGS['SERVERS'] = [{'url': SITE_URL}]
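SPECTACULAR_SETTINGS moves to the end of settings.py and now gains a 'SERVERS' entry when a site URL is configured. A sketch of the resulting structure, with a hypothetical site URL and a trimmed-down settings dict:

    SITE_URL = 'https://inventree.example.com'  # hypothetical value
    SPECTACULAR_SETTINGS = {'TITLE': 'InvenTree API', 'SCHEMA_PATH_PREFIX': '/api/'}

    if SITE_URL:
        # the generated OpenAPI schema will advertise this server URL
        SPECTACULAR_SETTINGS['SERVERS'] = [{'url': SITE_URL}]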
@@ -180,6 +180,8 @@ def offload_task(
     Returns:
         bool: True if the task was offloaded (or ran), False otherwise
     """
+    from InvenTree.exceptions import log_error
+
     try:
         import importlib

@@ -213,6 +215,7 @@ def offload_task(
            return False
    except Exception as exc:
        raise_warning(f"WARNING: '{taskname}' not offloaded due to {str(exc)}")
+       log_error('InvenTree.offload_task')
        return False
    else:
        if callable(taskname):
@@ -233,6 +236,7 @@ def offload_task(
        try:
            _mod = importlib.import_module(app_mod)
        except ModuleNotFoundError:
+           log_error('InvenTree.offload_task')
            raise_warning(
                f"WARNING: '{taskname}' not started - No module named '{app_mod}'"
            )
@@ -249,6 +253,7 @@ def offload_task(
            if not _func:
                _func = eval(func) # pragma: no cover
        except NameError:
+           log_error('InvenTree.offload_task')
            raise_warning(
                f"WARNING: '{taskname}' not started - No function named '{func}'"
            )
@@ -258,6 +263,7 @@ def offload_task(
        try:
            _func(*args, **kwargs)
        except Exception as exc:
+           log_error('InvenTree.offload_task')
            raise_warning(f"WARNING: '{taskname}' not started due to {str(exc)}")
            return False

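Each failure path in offload_task() now records the error via log_error() before warning and returning. For readers following along, a hedged sketch of how the helper is invoked elsewhere in this commit; the argument values are hypothetical, and whether a dotted-path string or a callable is passed depends on the caller (the callable form is what build/models.py uses below):

    import InvenTree.tasks
    import build.tasks

    # Offload a task to the background worker; extra arguments are passed through
    InvenTree.tasks.offload_task(
        build.tasks.complete_build_allocations,  # task callable (see build/tasks.py)
        42,                                      # build_id (hypothetical)
        None,                                    # user_id (hypothetical)
    )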
@@ -14,8 +14,10 @@ from django.core import mail
 from django.core.exceptions import ValidationError
 from django.test import TestCase, override_settings, tag
 from django.urls import reverse
+from django.utils import timezone

 import pint.errors
+import pytz
 from djmoney.contrib.exchange.exceptions import MissingRate
 from djmoney.contrib.exchange.models import Rate, convert_money
 from djmoney.money import Money
@@ -40,6 +42,147 @@ from .tasks import offload_task
 from .validators import validate_overage


+class HostTest(InvenTreeTestCase):
+    """Test for host configuration."""
+
+    @override_settings(ALLOWED_HOSTS=['testserver'])
+    def test_allowed_hosts(self):
+        """Test that the ALLOWED_HOSTS functions as expected."""
+        self.assertIn('testserver', settings.ALLOWED_HOSTS)
+
+        response = self.client.get('/api/', headers={'host': 'testserver'})
+
+        self.assertEqual(response.status_code, 200)
+
+        response = self.client.get('/api/', headers={'host': 'invalidserver'})
+
+        self.assertEqual(response.status_code, 400)
+
+    @override_settings(ALLOWED_HOSTS=['invalidserver.co.uk'])
+    def test_allowed_hosts_2(self):
+        """Another test for ALLOWED_HOSTS functionality."""
+        response = self.client.get('/api/', headers={'host': 'invalidserver.co.uk'})
+
+        self.assertEqual(response.status_code, 200)
+
+
+class CorsTest(TestCase):
+    """Unit tests for CORS functionality."""
+
+    def cors_headers(self):
+        """Return a list of CORS headers."""
+        return [
+            'access-control-allow-origin',
+            'access-control-allow-credentials',
+            'access-control-allow-methods',
+            'access-control-allow-headers',
+        ]
+
+    def preflight(self, url, origin, method='GET'):
+        """Make a CORS preflight request to the specified URL."""
+        headers = {'origin': origin, 'access-control-request-method': method}
+
+        return self.client.options(url, headers=headers)
+
+    def test_no_origin(self):
+        """Test that CORS headers are not included for regular requests.
+
+        - We use the /api/ endpoint for this test (it does not require auth)
+        - By default, in debug mode *all* CORS origins are allowed
+        """
+        # Perform an initial response without the "origin" header
+        response = self.client.get('/api/')
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertNotIn(header, response.headers)
+
+        # Now, perform a "preflight" request with the "origin" header
+        response = self.preflight('/api/', origin='http://random-external-server.com')
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertIn(header, response.headers)
+
+        self.assertEqual(response.headers['content-length'], '0')
+        self.assertEqual(
+            response.headers['access-control-allow-origin'],
+            'http://random-external-server.com',
+        )
+
+    @override_settings(
+        CORS_ALLOW_ALL_ORIGINS=False,
+        CORS_ALLOWED_ORIGINS=['http://my-external-server.com'],
+        CORS_ALLOWED_ORIGIN_REGEXES=[],
+    )
+    def test_auth_view(self):
+        """Test that CORS requests work for the /auth/ view.
+
+        Here, we are not authorized by default,
+        but the CORS headers should still be included.
+        """
+        url = '/auth/'
+
+        # First, a preflight request with a "valid" origin
+
+        response = self.preflight(url, origin='http://my-external-server.com')
+
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertIn(header, response.headers)
+
+        # Next, a preflight request with an "invalid" origin
+        response = self.preflight(url, origin='http://random-external-server.com')
+
+        self.assertEqual(response.status_code, 200)
+
+        for header in self.cors_headers():
+            self.assertNotIn(header, response.headers)
+
+        # Next, make a GET request (without a token)
+        response = self.client.get(
+            url, headers={'origin': 'http://my-external-server.com'}
+        )
+
+        # Unauthorized
+        self.assertEqual(response.status_code, 401)
+
+        self.assertIn('access-control-allow-origin', response.headers)
+        self.assertNotIn('access-control-allow-methods', response.headers)
+
+    @override_settings(
+        CORS_ALLOW_ALL_ORIGINS=False,
+        CORS_ALLOWED_ORIGINS=[],
+        CORS_ALLOWED_ORIGIN_REGEXES=['http://.*myserver.com'],
+    )
+    def test_cors_regex(self):
+        """Test that CORS regexes work as expected."""
+        valid_urls = [
+            'http://www.myserver.com',
+            'http://test.myserver.com',
+            'http://myserver.com',
+            'http://www.myserver.com:8080',
+        ]
+
+        invalid_urls = [
+            'http://myserver.org',
+            'http://www.other-server.org',
+            'http://google.com',
+            'http://myserver.co.uk:8080',
+        ]
+
+        for url in valid_urls:
+            response = self.preflight('/api/', origin=url)
+            self.assertEqual(response.status_code, 200)
+            self.assertIn('access-control-allow-origin', response.headers)
+
+        for url in invalid_urls:
+            response = self.preflight('/api/', origin=url)
+            self.assertEqual(response.status_code, 200)
+            self.assertNotIn('access-control-allow-origin', response.headers)
+
+
 class ConversionTest(TestCase):
     """Tests for conversion of physical units."""

@@ -138,6 +281,24 @@ class ConversionTest(TestCase):
         q = InvenTree.conversion.convert_physical_value(val, 'W', strip_units=False)
         self.assertAlmostEqual(float(q.magnitude), expected, places=2)

+    def test_imperial_lengths(self):
+        """Test support of imperial length measurements."""
+        tests = [
+            ('1 inch', 'mm', 25.4),
+            ('1 "', 'mm', 25.4),
+            ('2 "', 'inches', 2),
+            ('3 feet', 'inches', 36),
+            ("3'", 'inches', 36),
+            ("7 '", 'feet', 7),
+        ]
+
+        for val, unit, expected in tests:
+            output = InvenTree.conversion.convert_physical_value(
+                val, unit, strip_units=True
+            )
+
+            self.assertAlmostEqual(output, expected, 3)
+
     def test_dimensionless_units(self):
         """Tests for 'dimensionless' unit quantities."""
         # Test some dimensionless units
@@ -413,6 +574,7 @@ class FormatTest(TestCase):
 class TestHelpers(TestCase):
     """Tests for InvenTree helper functions."""

+    @override_settings(SITE_URL=None)
     def test_absolute_url(self):
         """Test helper function for generating an absolute URL."""
         base = 'https://demo.inventree.org:12345'
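The new test_imperial_lengths cases exercise InvenTree's unit conversion helper. The same conversions can be sanity-checked directly against pint, the unit library the test module already imports; this is a rough cross-check, not InvenTree API:

    import pint

    ureg = pint.UnitRegistry()

    # 1 inch is exactly 25.4 mm; 3 feet is 36 inches
    assert abs((1 * ureg.inch).to(ureg.mm).magnitude - 25.4) < 1e-6
    assert abs((3 * ureg.feet).to(ureg.inch).magnitude - 36) < 1e-6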
@@ -587,6 +749,47 @@ class TestHelpers(TestCase):
             self.assertEqual(helpers.generateTestKey(name), key)


+class TestTimeFormat(TestCase):
+    """Unit test for time formatting functionality."""
+
+    @override_settings(TIME_ZONE='UTC')
+    def test_tz_utc(self):
+        """Check UTC timezone."""
+        self.assertEqual(InvenTree.helpers.server_timezone(), 'UTC')
+
+    @override_settings(TIME_ZONE='Europe/London')
+    def test_tz_london(self):
+        """Check London timezone."""
+        self.assertEqual(InvenTree.helpers.server_timezone(), 'Europe/London')
+
+    @override_settings(TIME_ZONE='Australia/Sydney')
+    def test_to_local_time(self):
+        """Test that the local time conversion works as expected."""
+        source_time = timezone.datetime(
+            year=2000,
+            month=1,
+            day=1,
+            hour=0,
+            minute=0,
+            second=0,
+            tzinfo=pytz.timezone('Europe/London'),
+        )
+
+        tests = [
+            ('UTC', '2000-01-01 00:01:00+00:00'),
+            ('Europe/London', '2000-01-01 00:00:00-00:01'),
+            ('America/New_York', '1999-12-31 19:01:00-05:00'),
+            # All following tests should result in the same value
+            ('Australia/Sydney', '2000-01-01 11:01:00+11:00'),
+            (None, '2000-01-01 11:01:00+11:00'),
+            ('', '2000-01-01 11:01:00+11:00'),
+        ]
+
+        for tz, expected in tests:
+            local_time = InvenTree.helpers.to_local_time(source_time, tz)
+            self.assertEqual(str(local_time), expected)
+
+
 class TestQuoteWrap(TestCase):
     """Tests for string wrapping."""

@@ -894,6 +1097,7 @@ class TestVersionNumber(TestCase):
         hash = str(
             subprocess.check_output('git rev-parse --short HEAD'.split()), 'utf-8'
         ).strip()
+
         self.assertEqual(hash, version.inventreeCommitHash())

         d = (
@@ -1144,6 +1348,7 @@ class TestInstanceName(InvenTreeTestCase):
         site_obj = Site.objects.all().order_by('id').first()
         self.assertEqual(site_obj.name, 'Testing title')

+    @override_settings(SITE_URL=None)
     def test_instance_url(self):
         """Test instance url settings."""
         # Set up required setting
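The TestTimeFormat cases pin down to_local_time() behaviour across timezones. Note that the test constructs its source datetime with tzinfo=pytz.timezone(...), which applies pytz's historical LMT offset for London (-00:01) — hence the one-minute offsets in the expected strings. The equivalent conversion with a properly localized datetime looks like this (illustrative, stdlib plus pytz only):

    from datetime import datetime
    import pytz

    # localize() attaches the correct zone offset, avoiding the LMT pitfall
    source = pytz.timezone('Europe/London').localize(datetime(2000, 1, 1, 0, 0, 0))

    # Convert the same instant into another zone
    local = source.astimezone(pytz.timezone('America/New_York'))
    print(local)  # 1999-12-31 19:00:00-05:00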
@@ -19,7 +19,7 @@ from dulwich.repo import NotGitRepository, Repo
 from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION

 # InvenTree software version
-INVENTREE_SW_VERSION = '0.14.0 dev'
+INVENTREE_SW_VERSION = '0.15.0 dev'

 # Discover git
 try:
@@ -74,7 +74,7 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
         verbose_name = _("Build Order")
         verbose_name_plural = _("Build Orders")

-    OVERDUE_FILTER = Q(status__in=BuildStatusGroups.ACTIVE_CODES) & ~Q(target_date=None) & Q(target_date__lte=datetime.now().date())
+    OVERDUE_FILTER = Q(status__in=BuildStatusGroups.ACTIVE_CODES) & ~Q(target_date=None) & Q(target_date__lte=InvenTree.helpers.current_date())

     # Global setting for specifying reference pattern
     REFERENCE_PATTERN_SETTING = 'BUILDORDER_REFERENCE_PATTERN'
@@ -121,6 +121,12 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo

         super().clean()

+        if common.models.InvenTreeSetting.get_setting('BUILDORDER_REQUIRE_RESPONSIBLE'):
+            if not self.responsible:
+                raise ValidationError({
+                    'responsible': _('Responsible user or group must be specified')
+                })
+
         # Prevent changing target part after creation
         if self.has_field_changed('part'):
             raise ValidationError({
@@ -519,16 +525,11 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
         return True

     @transaction.atomic
-    def complete_build(self, user):
-        """Mark this build as complete."""
-        if self.incomplete_count > 0:
-            return
-
-        self.completion_date = datetime.now().date()
-        self.completed_by = user
-        self.status = BuildStatus.COMPLETE.value
-        self.save()
+    def complete_allocations(self, user):
+        """Complete all stock allocations for this build order.

+        - This function is called when a build order is completed
+        """
         # Remove untracked allocated stock
         self.subtract_allocated_stock(user)

@@ -536,6 +537,27 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
         # which point to this Build Order
         self.allocated_stock.delete()

+    @transaction.atomic
+    def complete_build(self, user):
+        """Mark this build as complete."""
+
+        import build.tasks
+
+        if self.incomplete_count > 0:
+            return
+
+        self.completion_date = InvenTree.helpers.current_date()
+        self.completed_by = user
+        self.status = BuildStatus.COMPLETE.value
+        self.save()
+
+        # Offload task to complete build allocations
+        InvenTree.tasks.offload_task(
+            build.tasks.complete_build_allocations,
+            self.pk,
+            user.pk if user else None
+        )
+
         # Register an event
         trigger_event('build.completed', id=self.pk)

@@ -606,7 +628,7 @@ class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNo
             output.delete()

         # Date of 'completion' is the date the build was cancelled
-        self.completion_date = datetime.now().date()
+        self.completion_date = InvenTree.helpers.current_date()
         self.completed_by = user

         self.status = BuildStatus.CANCELLED.value
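Build completion now offloads the allocation cleanup to a background task instead of performing it inline. The call added above, shown in isolation (variable names here are placeholders for the model instance attributes used in the method):

    import build.tasks
    import InvenTree.tasks

    InvenTree.tasks.offload_task(
        build.tasks.complete_build_allocations,  # new task defined in build/tasks.py
        build_order.pk,                          # primary key of the Build being completed
        user.pk if user else None,               # acting user, or None
    )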
@@ -1,9 +1,10 @@
 """Background task definitions for the BuildOrder app"""

-from datetime import datetime, timedelta
+from datetime import timedelta
 from decimal import Decimal
 import logging

+from django.contrib.auth.models import User
 from django.utils.translation import gettext_lazy as _
 from django.template.loader import render_to_string

@@ -13,6 +14,7 @@ from plugin.events import trigger_event
 import common.notifications
 import build.models
 import InvenTree.email
+import InvenTree.helpers
 import InvenTree.helpers_model
 import InvenTree.tasks
 from InvenTree.status_codes import BuildStatusGroups
@@ -24,6 +26,27 @@ import part.models as part_models
 logger = logging.getLogger('inventree')


+def complete_build_allocations(build_id: int, user_id: int):
+    """Complete build allocations for a specified BuildOrder."""
+
+    build_order = build.models.Build.objects.filter(pk=build_id).first()
+
+    if user_id:
+        try:
+            user = User.objects.get(pk=user_id)
+        except User.DoesNotExist:
+            logger.warning("Could not complete build allocations for BuildOrder <%s> - User does not exist", build_id)
+            return
+    else:
+        user = None
+
+    if not build_order:
+        logger.warning("Could not complete build allocations for BuildOrder <%s> - BuildOrder does not exist", build_id)
+        return
+
+    build_order.complete_allocations(user)
+
+
 def update_build_order_lines(bom_item_pk: int):
     """Update all BuildOrderLineItem objects which reference a particular BomItem.

@@ -200,7 +223,7 @@ def check_overdue_build_orders():
     - Look at the 'target_date' of any outstanding BuildOrder objects
     - If the 'target_date' expired *yesterday* then the order is just out of date
     """
-    yesterday = datetime.now().date() - timedelta(days=1)
+    yesterday = InvenTree.helpers.current_date() - timedelta(days=1)

     overdue_orders = build.models.Build.objects.filter(
         target_date=yesterday,
@@ -148,7 +148,7 @@ class CurrencyExchangeView(APIView):

         response = {
             'base_currency': common.models.InvenTreeSetting.get_setting(
-                'INVENTREE_DEFAULT_CURRENCY', 'USD'
+                'INVENTREE_DEFAULT_CURRENCY', backup_value='USD'
             ),
             'exchange_rates': {},
             'updated': updated,
@@ -13,7 +13,7 @@ import math
 import os
 import re
 import uuid
-from datetime import datetime, timedelta, timezone
+from datetime import timedelta, timezone
 from enum import Enum
 from secrets import compare_digest
 from typing import Any, Callable, TypedDict, Union
@@ -190,6 +190,8 @@ class BaseInvenTreeSetting(models.Model):

     SETTINGS: dict[str, SettingsKeyType] = {}

+    CHECK_SETTING_KEY = False
+
     extra_unique_fields: list[str] = []

     class Meta:
@@ -226,9 +228,12 @@ class BaseInvenTreeSetting(models.Model):
         """
         cache_key = f'BUILD_DEFAULT_VALUES:{str(cls.__name__)}'

-        if InvenTree.helpers.str2bool(cache.get(cache_key, False)):
-            # Already built default values
-            return
+        try:
+            if InvenTree.helpers.str2bool(cache.get(cache_key, False)):
+                # Already built default values
+                return
+        except Exception:
+            pass

         try:
             existing_keys = cls.objects.filter(**kwargs).values_list('key', flat=True)
@@ -251,7 +256,10 @@ class BaseInvenTreeSetting(models.Model):
             )
             pass

-        cache.set(cache_key, True, timeout=3600)
+        try:
+            cache.set(cache_key, True, timeout=3600)
+        except Exception:
+            pass

     def _call_settings_function(self, reference: str, args, kwargs):
         """Call a function associated with a particular setting.
@@ -280,18 +288,17 @@ class BaseInvenTreeSetting(models.Model):

     def save_to_cache(self):
         """Save this setting object to cache."""
-        ckey = self.cache_key
+        key = self.cache_key

         # skip saving to cache if no pk is set
         if self.pk is None:
             return

-        logger.debug("Saving setting '%s' to cache", ckey)
+        logger.debug("Saving setting '%s' to cache", key)

         try:
-            cache.set(ckey, self, timeout=3600)
-        except TypeError:
-            # Some characters cause issues with caching; ignore and move on
+            cache.set(key, self, timeout=3600)
+        except Exception:
             pass

     @classmethod
@@ -554,28 +561,30 @@ class BaseInvenTreeSetting(models.Model):
         # Unless otherwise specified, attempt to create the setting
         create = kwargs.pop('create', True)

+        # Specify if cache lookup should be performed
+        do_cache = kwargs.pop('cache', False)
+
         # Prevent saving to the database during data import
         if InvenTree.ready.isImportingData():
             create = False
+            do_cache = False

         # Prevent saving to the database during migrations
         if InvenTree.ready.isRunningMigrations():
             create = False
+            do_cache = False

-        # Perform cache lookup by default
-        do_cache = kwargs.pop('cache', True)
-
-        ckey = cls.create_cache_key(key, **kwargs)
+        cache_key = cls.create_cache_key(key, **kwargs)

         if do_cache:
             try:
                 # First attempt to find the setting object in the cache
-                cached_setting = cache.get(ckey)
+                cached_setting = cache.get(cache_key)

                 if cached_setting is not None:
                     return cached_setting

-            except AppRegistryNotReady:
+            except Exception:
                 # Cache is not ready yet
                 do_cache = False

@@ -628,6 +637,17 @@ class BaseInvenTreeSetting(models.Model):

         If it does not exist, return the backup value (default = None)
         """
+        if (
+            cls.CHECK_SETTING_KEY
+            and key not in cls.SETTINGS
+            and not key.startswith('_')
+        ):
+            logger.warning(
+                "get_setting: Setting key '%s' is not defined for class %s",
+                key,
+                str(cls),
+            )
+
         # If no backup value is specified, attempt to retrieve a "default" value
         if backup_value is None:
             backup_value = cls.get_setting_default(key, **kwargs)
@@ -663,6 +683,17 @@ class BaseInvenTreeSetting(models.Model):
             change_user: User object (must be staff member to update a core setting)
             create: If True, create a new setting if the specified key does not exist.
         """
+        if (
+            cls.CHECK_SETTING_KEY
+            and key not in cls.SETTINGS
+            and not key.startswith('_')
+        ):
+            logger.warning(
+                "set_setting: Setting key '%s' is not defined for class %s",
+                key,
+                str(cls),
+            )
+
         if change_user is not None and not change_user.is_staff:
             return

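With CHECK_SETTING_KEY enabled on a settings class, get_setting() and set_setting() now warn about keys that are not declared in SETTINGS. A hedged illustration of the intended effect, using a deliberately misspelled (hypothetical) key name:

    from common.models import InvenTreeSetting

    # Hypothetical typo - 'BUILDORDER_REQUIRES_RESPONSIBLE' is not a declared key
    InvenTreeSetting.get_setting('BUILDORDER_REQUIRES_RESPONSIBLE')
    # expected log output (roughly):
    # "get_setting: Setting key 'BUILDORDER_REQUIRES_RESPONSIBLE' is not defined for class ..."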
@@ -1192,6 +1223,8 @@ class InvenTreeSetting(BaseInvenTreeSetting):

     SETTINGS: dict[str, InvenTreeSettingsKeyType]

+    CHECK_SETTING_KEY = True
+
     class Meta:
         """Meta options for InvenTreeSetting."""

@@ -1646,6 +1679,12 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': False,
             'validator': bool,
         },
+        'REPORT_LOG_ERRORS': {
+            'name': _('Log Report Errors'),
+            'description': _('Log errors which occur when generating reports'),
+            'default': False,
+            'validator': bool,
+        },
         'REPORT_DEFAULT_PAGE_SIZE': {
             'name': _('Page Size'),
             'description': _('Default page size for PDF reports'),
@@ -1681,7 +1720,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
         'STOCK_DELETE_DEPLETED_DEFAULT': {
             'name': _('Delete Depleted Stock'),
             'description': _(
-                'Determines default behaviour when a stock item is depleted'
+                'Determines default behavior when a stock item is depleted'
             ),
             'default': True,
             'validator': bool,
@@ -1737,6 +1776,14 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': False,
             'validator': bool,
         },
+        'STOCK_ENFORCE_BOM_INSTALLATION': {
+            'name': _('Check BOM when installing items'),
+            'description': _(
+                'Installed stock items must exist in the BOM for the parent part'
+            ),
+            'default': True,
+            'validator': bool,
+        },
         'BUILDORDER_REFERENCE_PATTERN': {
             'name': _('Build Order Reference Pattern'),
             'description': _(
@@ -1745,6 +1792,20 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': 'BO-{ref:04d}',
             'validator': build.validators.validate_build_order_reference_pattern,
         },
+        'BUILDORDER_REQUIRE_RESPONSIBLE': {
+            'name': _('Require Responsible Owner'),
+            'description': _('A responsible owner must be assigned to each order'),
+            'default': False,
+            'validator': bool,
+        },
+        'PREVENT_BUILD_COMPLETION_HAVING_INCOMPLETED_TESTS': {
+            'name': _('Block Until Tests Pass'),
+            'description': _(
+                'Prevent build outputs from being completed until all required tests pass'
+            ),
+            'default': False,
+            'validator': bool,
+        },
         'RETURNORDER_ENABLED': {
             'name': _('Enable Return Orders'),
             'description': _('Enable return order functionality in the user interface'),
@@ -1759,6 +1820,12 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': 'RMA-{ref:04d}',
             'validator': order.validators.validate_return_order_reference_pattern,
         },
+        'RETURNORDER_REQUIRE_RESPONSIBLE': {
+            'name': _('Require Responsible Owner'),
+            'description': _('A responsible owner must be assigned to each order'),
+            'default': False,
+            'validator': bool,
+        },
         'RETURNORDER_EDIT_COMPLETED_ORDERS': {
             'name': _('Edit Completed Return Orders'),
             'description': _(
@@ -1775,6 +1842,12 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': 'SO-{ref:04d}',
             'validator': order.validators.validate_sales_order_reference_pattern,
         },
+        'SALESORDER_REQUIRE_RESPONSIBLE': {
+            'name': _('Require Responsible Owner'),
+            'description': _('A responsible owner must be assigned to each order'),
+            'default': False,
+            'validator': bool,
+        },
         'SALESORDER_DEFAULT_SHIPMENT': {
             'name': _('Sales Order Default Shipment'),
             'description': _('Enable creation of default shipment with sales orders'),
@@ -1797,6 +1870,12 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': 'PO-{ref:04d}',
             'validator': order.validators.validate_purchase_order_reference_pattern,
         },
+        'PURCHASEORDER_REQUIRE_RESPONSIBLE': {
+            'name': _('Require Responsible Owner'),
+            'description': _('A responsible owner must be assigned to each order'),
+            'default': False,
+            'validator': bool,
+        },
         'PURCHASEORDER_EDIT_COMPLETED_ORDERS': {
             'name': _('Edit Completed Purchase Orders'),
             'description': _(
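The four new *_REQUIRE_RESPONSIBLE settings are plain boolean flags; the build-order variant is enforced in Build.clean() earlier in this commit. A sketch of that guard, lifted out as a standalone function for readability (illustrative only; field and setting names are the ones used above):

    from django.core.exceptions import ValidationError
    import common.models

    def check_responsible(order):
        """Raise if a responsible owner is required but missing (illustrative only)."""
        if common.models.InvenTreeSetting.get_setting('BUILDORDER_REQUIRE_RESPONSIBLE'):
            if not order.responsible:
                raise ValidationError({
                    'responsible': 'Responsible user or group must be specified'
                })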
@@ -1983,11 +2062,9 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'default': False,
             'validator': bool,
         },
-        'PREVENT_BUILD_COMPLETION_HAVING_INCOMPLETED_TESTS': {
-            'name': _('Block Until Tests Pass'),
-            'description': _(
-                'Prevent build outputs from being completed until all required tests pass'
-            ),
+        'TEST_STATION_DATA': {
+            'name': _('Enable Test Station Data'),
+            'description': _('Enable test station data collection for test results'),
             'default': False,
             'validator': bool,
         },
@@ -2029,6 +2106,8 @@ def label_printer_options():
 class InvenTreeUserSetting(BaseInvenTreeSetting):
     """An InvenTreeSetting object with a user context."""

+    CHECK_SETTING_KEY = True
+
     class Meta:
         """Meta options for InvenTreeUserSetting."""

@@ -2066,7 +2145,7 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
             'validator': bool,
         },
         'HOMEPAGE_BOM_REQUIRES_VALIDATION': {
-            'name': _('Show unvalidated BOMs'),
+            'name': _('Show invalid BOMs'),
             'description': _('Show BOMs that await validation on the homepage'),
             'default': False,
             'validator': bool,
@@ -2379,6 +2458,14 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
             'validator': [int],
             'default': '',
         },
+        'DEFAULT_LINE_LABEL_TEMPLATE': {
+            'name': _('Default build line label template'),
+            'description': _(
+                'The build line label template to be automatically selected'
+            ),
+            'validator': [int],
+            'default': '',
+        },
         'NOTIFICATION_ERROR_REPORT': {
             'name': _('Receive error reports'),
             'description': _('Receive notifications for system errors'),
@@ -2589,7 +2676,7 @@ class VerificationMethod(Enum):


 class WebhookEndpoint(models.Model):
-    """Defines a Webhook entdpoint.
+    """Defines a Webhook endpoint.

     Attributes:
         endpoint_id: Path to the webhook,
@@ -2828,7 +2915,7 @@ class NotificationEntry(MetaMixin):
     @classmethod
     def check_recent(cls, key: str, uid: int, delta: timedelta):
         """Test if a particular notification has been sent in the specified time period."""
-        since = datetime.now().date() - delta
+        since = InvenTree.helpers.current_date() - delta

         entries = cls.objects.filter(key=key, uid=uid, updated__gte=since)

@@ -2924,7 +3011,7 @@ class NewsFeedEntry(models.Model):
     - published: Date of publishing of the news item
     - author: Author of news item
     - summary: Summary of the news items content
-    - read: Was this iteam already by a superuser?
+    - read: Was this item already by a superuser?
     """

     feed_id = models.CharField(verbose_name=_('Id'), unique=True, max_length=250)
@@ -63,7 +63,7 @@ class SettingsSerializer(InvenTreeModelSerializer):

     typ = serializers.CharField(read_only=True)

-    def get_choices(self, obj):
+    def get_choices(self, obj) -> list:
         """Returns the choices available for a given item."""
         results = []

@@ -14,7 +14,10 @@ def currency_code_default():
     """Returns the default currency code (or USD if not specified)."""
     from common.models import InvenTreeSetting

-    cached_value = cache.get('currency_code_default', '')
+    try:
+        cached_value = cache.get('currency_code_default', '')
+    except Exception:
+        cached_value = None

     if cached_value:
         return cached_value
@@ -31,7 +34,10 @@ def currency_code_default():
         code = 'USD' # pragma: no cover

     # Cache the value for a short amount of time
-    cache.set('currency_code_default', code, 30)
+    try:
+        cache.set('currency_code_default', code, 30)
+    except Exception:
+        pass

     return code

@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from datetime import datetime, timedelta
|
from datetime import timedelta
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import AppRegistryNotReady
|
from django.core.exceptions import AppRegistryNotReady
|
||||||
@ -12,6 +12,7 @@ from django.utils import timezone
|
|||||||
import feedparser
|
import feedparser
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
|
import InvenTree.helpers
|
||||||
from InvenTree.helpers_model import getModelsWithMixin
|
from InvenTree.helpers_model import getModelsWithMixin
|
||||||
from InvenTree.models import InvenTreeNotesMixin
|
from InvenTree.models import InvenTreeNotesMixin
|
||||||
from InvenTree.tasks import ScheduledTask, scheduled_task
|
from InvenTree.tasks import ScheduledTask, scheduled_task
|
||||||
@ -107,7 +108,7 @@ def delete_old_notes_images():
|
|||||||
note.delete()
|
note.delete()
|
||||||
|
|
||||||
note_classes = getModelsWithMixin(InvenTreeNotesMixin)
|
note_classes = getModelsWithMixin(InvenTreeNotesMixin)
|
||||||
before = datetime.now() - timedelta(days=90)
|
before = InvenTree.helpers.current_date() - timedelta(days=90)
|
||||||
|
|
||||||
for note in NotesImage.objects.filter(date__lte=before):
|
for note in NotesImage.objects.filter(date__lte=before):
|
||||||
# Find any images which are no longer referenced by a note
|
# Find any images which are no longer referenced by a note
|
||||||
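Several hunks in this commit make the same substitution: `datetime.now()` becomes `InvenTree.helpers.current_date()` or `current_time()`. The helper itself is not part of this diff, so the following is only a sketch of the assumed intent (a thin timezone-aware wrapper over Django's `timezone` utilities); the real implementation may differ:

    # Sketch only: assumed shape of the helpers referenced above.
    from django.utils import timezone

    def current_time():
        """Return a timezone-aware timestamp for 'now'."""
        return timezone.now()

    def current_date():
        """Return today's date, derived from the timezone-aware timestamp."""
        return current_time().date()

Routing every call site through one helper keeps date handling consistent with the project's timezone configuration instead of mixing naive and aware datetimes.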
@@ -12,6 +12,7 @@ from django.core.cache import cache
 from django.core.exceptions import ValidationError
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.test import Client, TestCase
+from django.test.utils import override_settings
 from django.urls import reverse
 
 import PIL
@@ -271,6 +272,7 @@ class SettingsTest(InvenTreeTestCase):
                 print(f"run_settings_check failed for user setting '{key}'")
                 raise exc
 
+    @override_settings(SITE_URL=None)
     def test_defaults(self):
         """Populate the settings with default values."""
         for key in InvenTreeSetting.SETTINGS.keys():
@@ -82,7 +82,7 @@ class CompanyDetail(RetrieveUpdateDestroyAPI):
 
 
 class CompanyAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for the CompanyAttachment model."""
+    """API endpoint for listing, creating and bulk deleting a CompanyAttachment."""
 
     queryset = CompanyAttachment.objects.all()
     serializer_class = CompanyAttachmentSerializer
@@ -215,7 +215,7 @@ class ManufacturerPartDetail(RetrieveUpdateDestroyAPI):
 
 
 class ManufacturerPartAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for listing (and creating) a ManufacturerPartAttachment (file upload)."""
+    """API endpoint for listing, creating and bulk deleting a ManufacturerPartAttachment (file upload)."""
 
     queryset = ManufacturerPartAttachment.objects.all()
     serializer_class = ManufacturerPartAttachmentSerializer
@@ -901,7 +901,7 @@ class SupplierPart(
     def update_available_quantity(self, quantity):
         """Update the available quantity for this SupplierPart."""
         self.available = quantity
-        self.availability_updated = datetime.now()
+        self.availability_updated = InvenTree.helpers.current_time()
        self.save()
 
     @property
@@ -42,11 +42,13 @@ class CompanyBriefSerializer(InvenTreeModelSerializer):
         """Metaclass options."""
 
         model = Company
-        fields = ['pk', 'url', 'name', 'description', 'image']
+        fields = ['pk', 'url', 'name', 'description', 'image', 'thumbnail']
 
     url = serializers.CharField(source='get_absolute_url', read_only=True)
 
-    image = serializers.CharField(source='get_thumbnail_url', read_only=True)
+    image = InvenTreeImageSerializerField(read_only=True)
+
+    thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)
 
 
 class AddressSerializer(InvenTreeModelSerializer):
@@ -168,9 +168,9 @@ allowed_hosts:
 
 # Trusted origins (see CSRF_TRUSTED_ORIGINS in Django settings documentation)
 # If you are running behind a proxy, you may need to add the proxy address here
-trusted_origins:
-  - 'http://localhost:8000'
+# trusted_origins:
+#   - 'http://localhost'
+#   - 'http://*.localhost'
 
 # Proxy forwarding settings
 # If InvenTree is running behind a proxy, you may need to configure these settings
@@ -183,24 +183,23 @@ use_x_forwarded_port: false
 
 # Cross Origin Resource Sharing (CORS) settings (see https://github.com/adamchainz/django-cors-headers)
 cors:
-  allow_all: True
-  allow_credentials: True,
+  allow_all: true
+  allow_credentials: true
 
   # whitelist:
   # - https://example.com
   # - https://sub.example.com
 
+  # regex:
+
 # MEDIA_ROOT is the local filesystem location for storing uploaded files
 #media_root: '/home/inventree/data/media'
 
 # STATIC_ROOT is the local filesystem location for storing static files
 #static_root: '/home/inventree/data/static'
 
-### Backup configuration options ###
 # INVENTREE_BACKUP_DIR is the local filesystem location for storing backups
-backup_storage: django.core.files.storage.FileSystemStorage
 #backup_dir: '/home/inventree/data/backup'
-#backup_options:
 
 # Background worker options
 background:
@@ -1,11 +1,9 @@
 """Shared templating code."""
 
 import logging
-import os
 import warnings
 from pathlib import Path
 
-from django.conf import settings
 from django.core.exceptions import AppRegistryNotReady
 from django.core.files.storage import default_storage
 from django.db.utils import IntegrityError, OperationalError, ProgrammingError
@@ -18,9 +16,6 @@ from InvenTree.config import ensure_dir
 logger = logging.getLogger('inventree')
 
 
-MEDIA_STORAGE_DIR = Path(settings.MEDIA_ROOT)
-
-
 class TemplatingMixin:
     """Mixin that contains shared templating code."""
 
@@ -84,8 +79,7 @@ class TemplatingMixin:
 
         # Create root dir for templates
         src_dir = self.get_src_dir(ref_name)
-        dst_dir = MEDIA_STORAGE_DIR.joinpath(self.name, 'inventree', ref_name)
-        ensure_dir(dst_dir, default_storage)
+        ensure_dir(Path(self.name, 'inventree', ref_name), default_storage)
 
         # Copy each template across (if required)
         for entry in data:
@@ -94,29 +88,27 @@ class TemplatingMixin:
     def create_template_file(self, model, src_dir, data, ref_name):
         """Ensure a label template is in place."""
         # Destination filename
-        filename = os.path.join(self.name, 'inventree', ref_name, data['file'])
+        filename = Path(self.name, 'inventree', ref_name, data['file'])
 
         src_file = src_dir.joinpath(data['file'])
-        dst_file = MEDIA_STORAGE_DIR.joinpath(filename)
 
         do_copy = False
 
-        if not dst_file.exists():
+        if not default_storage.exists(filename):
             logger.info("%s template '%s' is not present", self.name, filename)
             do_copy = True
         else:
             # Check if the file contents are different
             src_hash = InvenTree.helpers.hash_file(src_file)
-            dst_hash = InvenTree.helpers.hash_file(dst_file)
+            dst_hash = InvenTree.helpers.hash_file(filename, default_storage)
 
             if src_hash != dst_hash:
                 logger.info("Hash differs for '%s'", filename)
                 do_copy = True
 
         if do_copy:
-            logger.info("Copying %s template '%s'", self.name, dst_file)
+            logger.info("Copying %s template '%s'", self.name, filename)
             # Ensure destination dir exists
-            dst_file.parent.mkdir(parents=True, exist_ok=True)
+            ensure_dir(filename.parent, default_storage)
 
             # Copy file
             default_storage.save(filename, src_file.open('rb'))
@@ -135,6 +127,8 @@ class TemplatingMixin:
         logger.info("Creating entry for %s '%s'", model, data.get('name'))
 
         try:
-            model.objects.create(**self.get_new_obj_data(data, filename))
-        except Exception:
-            logger.warning("Failed to create %s '%s'", self.name, data['name'])
+            model.objects.create(**self.get_new_obj_data(data, str(filename)))
+        except Exception as _e:
+            logger.warning(
+                "Failed to create %s '%s' with error '%s'", self.name, data['name'], _e
+            )
@@ -1,6 +1,5 @@
 """Label printing models."""
 
-import datetime
 import logging
 import os
 import sys
@@ -15,6 +14,7 @@ from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
 
 import build.models
+import InvenTree.helpers
 import InvenTree.models
 import part.models
 import stock.models
@@ -228,8 +228,8 @@ class LabelTemplate(InvenTree.models.InvenTreeMetadataModel):
 
         # Add "basic" context data which gets passed to every label
         context['base_url'] = get_base_url(request=request)
-        context['date'] = datetime.datetime.now().date()
-        context['datetime'] = datetime.datetime.now()
+        context['date'] = InvenTree.helpers.current_date()
+        context['datetime'] = InvenTree.helpers.current_time()
         context['request'] = request
         context['user'] = request.user
         context['width'] = self.width
@@ -15,7 +15,16 @@ class LabelSerializerBase(InvenTreeModelSerializer):
     @staticmethod
     def label_fields():
         """Generic serializer fields for a label template."""
-        return ['pk', 'name', 'description', 'label', 'filters', 'enabled']
+        return [
+            'pk',
+            'name',
+            'description',
+            'label',
+            'filters',
+            'width',
+            'height',
+            'enabled',
+        ]
 
 
 class StockItemLabelSerializer(LabelSerializerBase):
14162 InvenTree/locale/lv/LC_MESSAGES/django.po (Normal file)
@@ -144,7 +144,9 @@ class MachineRestart(APIView):
 
     permission_classes = [permissions.IsAuthenticated]
 
-    @extend_schema(responses={200: MachineSerializers.MachineRestartSerializer()})
+    @extend_schema(
+        request=None, responses={200: MachineSerializers.MachineRestartSerializer()}
+    )
     def post(self, request, pk):
         """Restart machine by pk."""
         machine = get_machine(pk)
@@ -629,7 +629,7 @@ class PurchaseOrderExtraLineDetail(RetrieveUpdateDestroyAPI):
 
 
 class SalesOrderAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for listing (and creating) a SalesOrderAttachment (file upload)."""
+    """API endpoint for listing, creating and bulk deleting a SalesOrderAttachment (file upload)."""
 
     queryset = models.SalesOrderAttachment.objects.all()
     serializer_class = serializers.SalesOrderAttachmentSerializer
@@ -1097,7 +1097,7 @@ class SalesOrderShipmentComplete(CreateAPI):
 
 
 class PurchaseOrderAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for listing (and creating) a PurchaseOrderAttachment (file upload)."""
+    """API endpoint for listing, creating and bulk deleting) a PurchaseOrderAttachment (file upload)."""
 
     queryset = models.PurchaseOrderAttachment.objects.all()
     serializer_class = serializers.PurchaseOrderAttachmentSerializer
@@ -1363,7 +1363,7 @@ class ReturnOrderExtraLineDetail(RetrieveUpdateDestroyAPI):
 
 
 class ReturnOrderAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for listing (and creating) a ReturnOrderAttachment (file upload)."""
+    """API endpoint for listing, creating and bulk deleting a ReturnOrderAttachment (file upload)."""
 
     queryset = models.ReturnOrderAttachment.objects.all()
     serializer_class = serializers.ReturnOrderAttachmentSerializer
@@ -207,6 +207,8 @@ class Order(
         responsible: User (or group) responsible for managing the order
     """
 
+    REQUIRE_RESPONSIBLE_SETTING = None
+
     class Meta:
         """Metaclass options. Abstract ensures no database table is created."""
 
@@ -219,7 +221,7 @@ class Order(
         """
         self.reference_int = self.rebuild_reference_field(self.reference)
         if not self.creation_date:
-            self.creation_date = datetime.now().date()
+            self.creation_date = InvenTree.helpers.current_date()
 
         super().save(*args, **kwargs)
 
@@ -227,6 +229,16 @@ class Order(
         """Custom clean method for the generic order class."""
         super().clean()
 
+        # Check if a responsible owner is required for this order type
+        if self.REQUIRE_RESPONSIBLE_SETTING:
+            if common_models.InvenTreeSetting.get_setting(
+                self.REQUIRE_RESPONSIBLE_SETTING, backup_value=False
+            ):
+                if not self.responsible:
+                    raise ValidationError({
+                        'responsible': _('Responsible user or group must be specified')
+                    })
+
         # Check that the referenced 'contact' matches the correct 'company'
         if self.company and self.contact:
             if self.contact.company != self.company:
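The `REQUIRE_RESPONSIBLE_SETTING` hook added above is driven by a per-order-type global setting (the concrete keys appear further down in this diff). A rough usage sketch for a Django shell follows; the reference, description and company lookup are placeholder values, not taken from the commit:

    from django.core.exceptions import ValidationError

    from common.models import InvenTreeSetting
    from company.models import Company
    from order.models import PurchaseOrder

    # Enable the requirement for purchase orders
    InvenTreeSetting.set_setting('PURCHASEORDER_REQUIRE_RESPONSIBLE', True)

    po = PurchaseOrder(
        supplier=Company.objects.filter(is_supplier=True).first(),
        reference='PO-9999',
        description='demo order',
    )

    try:
        po.clean()  # no 'responsible' owner assigned, so validation fails
    except ValidationError as exc:
        print(exc.message_dict.get('responsible'))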
@@ -240,7 +252,7 @@ class Order(
 
         It requires any subclasses to implement the get_status_class() class method
         """
-        today = datetime.now().date()
+        today = InvenTree.helpers.current_date()
         return (
             Q(status__in=cls.get_status_class().OPEN)
             & ~Q(target_date=None)
@@ -347,6 +359,9 @@ class PurchaseOrder(TotalPriceMixin, Order):
         target_date: Expected delivery target date for PurchaseOrder completion (optional)
     """
 
+    REFERENCE_PATTERN_SETTING = 'PURCHASEORDER_REFERENCE_PATTERN'
+    REQUIRE_RESPONSIBLE_SETTING = 'PURCHASEORDER_REQUIRE_RESPONSIBLE'
+
     def get_absolute_url(self):
         """Get the 'web' URL for this order."""
         if settings.ENABLE_CLASSIC_FRONTEND:
@@ -372,9 +387,6 @@ class PurchaseOrder(TotalPriceMixin, Order):
 
         return defaults
 
-    # Global setting for specifying reference pattern
-    REFERENCE_PATTERN_SETTING = 'PURCHASEORDER_REFERENCE_PATTERN'
-
     @staticmethod
     def filterByDate(queryset, min_date, max_date):
         """Filter by 'minimum and maximum date range'.
@@ -572,7 +584,7 @@ class PurchaseOrder(TotalPriceMixin, Order):
         """
         if self.is_pending:
             self.status = PurchaseOrderStatus.PLACED.value
-            self.issue_date = datetime.now().date()
+            self.issue_date = InvenTree.helpers.current_date()
             self.save()
 
             trigger_event('purchaseorder.placed', id=self.pk)
@@ -592,7 +604,7 @@ class PurchaseOrder(TotalPriceMixin, Order):
         """
         if self.status == PurchaseOrderStatus.PLACED:
             self.status = PurchaseOrderStatus.COMPLETE.value
-            self.complete_date = datetime.now().date()
+            self.complete_date = InvenTree.helpers.current_date()
 
             self.save()
 
@@ -805,6 +817,9 @@ class PurchaseOrder(TotalPriceMixin, Order):
 class SalesOrder(TotalPriceMixin, Order):
     """A SalesOrder represents a list of goods shipped outwards to a customer."""
 
+    REFERENCE_PATTERN_SETTING = 'SALESORDER_REFERENCE_PATTERN'
+    REQUIRE_RESPONSIBLE_SETTING = 'SALESORDER_REQUIRE_RESPONSIBLE'
+
     def get_absolute_url(self):
         """Get the 'web' URL for this order."""
         if settings.ENABLE_CLASSIC_FRONTEND:
@@ -828,9 +843,6 @@ class SalesOrder(TotalPriceMixin, Order):
 
         return defaults
 
-    # Global setting for specifying reference pattern
-    REFERENCE_PATTERN_SETTING = 'SALESORDER_REFERENCE_PATTERN'
-
     @staticmethod
     def filterByDate(queryset, min_date, max_date):
         """Filter by "minimum and maximum date range".
@@ -1018,7 +1030,7 @@ class SalesOrder(TotalPriceMixin, Order):
         """Change this order from 'PENDING' to 'IN_PROGRESS'."""
         if self.status == SalesOrderStatus.PENDING:
             self.status = SalesOrderStatus.IN_PROGRESS.value
-            self.issue_date = datetime.now().date()
+            self.issue_date = InvenTree.helpers.current_date()
             self.save()
 
             trigger_event('salesorder.issued', id=self.pk)
@@ -1032,7 +1044,7 @@ class SalesOrder(TotalPriceMixin, Order):
 
         self.status = SalesOrderStatus.SHIPPED.value
         self.shipped_by = user
-        self.shipment_date = datetime.now()
+        self.shipment_date = InvenTree.helpers.current_date()
 
         self.save()
 
@@ -1334,7 +1346,7 @@ class PurchaseOrderLineItem(OrderLineItem):
     OVERDUE_FILTER = (
         Q(received__lt=F('quantity'))
         & ~Q(target_date=None)
-        & Q(target_date__lt=datetime.now().date())
+        & Q(target_date__lt=InvenTree.helpers.current_date())
     )
 
     @staticmethod
@@ -1493,7 +1505,7 @@ class SalesOrderLineItem(OrderLineItem):
     OVERDUE_FILTER = (
         Q(shipped__lt=F('quantity'))
         & ~Q(target_date=None)
-        & Q(target_date__lt=datetime.now().date())
+        & Q(target_date__lt=InvenTree.helpers.current_date())
     )
 
     @staticmethod
@@ -1736,7 +1748,9 @@ class SalesOrderShipment(
             allocation.complete_allocation(user)
 
         # Update the "shipment" date
-        self.shipment_date = kwargs.get('shipment_date', datetime.now())
+        self.shipment_date = kwargs.get(
+            'shipment_date', InvenTree.helpers.current_date()
+        )
         self.shipped_by = user
 
         # Was a tracking number provided?
@@ -1943,6 +1957,9 @@ class ReturnOrder(TotalPriceMixin, Order):
         status: The status of the order (refer to status_codes.ReturnOrderStatus)
     """
 
+    REFERENCE_PATTERN_SETTING = 'RETURNORDER_REFERENCE_PATTERN'
+    REQUIRE_RESPONSIBLE_SETTING = 'RETURNORDER_REQUIRE_RESPONSIBLE'
+
     def get_absolute_url(self):
         """Get the 'web' URL for this order."""
         if settings.ENABLE_CLASSIC_FRONTEND:
@@ -1968,8 +1985,6 @@ class ReturnOrder(TotalPriceMixin, Order):
 
         return defaults
 
-    REFERENCE_PATTERN_SETTING = 'RETURNORDER_REFERENCE_PATTERN'
-
     def __str__(self):
         """Render a string representation of this ReturnOrder."""
         return f"{self.reference} - {self.customer.name if self.customer else _('no customer')}"
@@ -2063,7 +2078,7 @@ class ReturnOrder(TotalPriceMixin, Order):
         """Complete this ReturnOrder (if not already completed)."""
        if self.status == ReturnOrderStatus.IN_PROGRESS:
             self.status = ReturnOrderStatus.COMPLETE.value
-            self.complete_date = datetime.now().date()
+            self.complete_date = InvenTree.helpers.current_date()
             self.save()
 
             trigger_event('returnorder.completed', id=self.pk)
@@ -2076,7 +2091,7 @@ class ReturnOrder(TotalPriceMixin, Order):
         """Issue this ReturnOrder (if currently pending)."""
         if self.status == ReturnOrderStatus.PENDING:
             self.status = ReturnOrderStatus.IN_PROGRESS.value
-            self.issue_date = datetime.now().date()
+            self.issue_date = InvenTree.helpers.current_date()
             self.save()
 
             trigger_event('returnorder.issued', id=self.pk)
@@ -2149,7 +2164,7 @@ class ReturnOrder(TotalPriceMixin, Order):
         )
 
         # Update the LineItem
-        line.received_date = datetime.now().date()
+        line.received_date = InvenTree.helpers.current_date()
         line.save()
 
         trigger_event('returnorder.received', id=self.pk)
@@ -5,7 +5,16 @@ from decimal import Decimal
 
 from django.core.exceptions import ValidationError as DjangoValidationError
 from django.db import models, transaction
-from django.db.models import BooleanField, Case, ExpressionWrapper, F, Q, Value, When
+from django.db.models import (
+    BooleanField,
+    Case,
+    ExpressionWrapper,
+    F,
+    Prefetch,
+    Q,
+    Value,
+    When,
+)
 from django.utils.translation import gettext_lazy as _
 
 from rest_framework import serializers
@@ -14,6 +23,8 @@ from sql_util.utils import SubqueryCount
 
 import order.models
 import part.filters
+import part.filters as part_filters
+import part.models as part_models
 import stock.models
 import stock.serializers
 from common.serializers import ProjectCodeSerializer
@@ -23,7 +34,13 @@ from company.serializers import (
     ContactSerializer,
     SupplierPartSerializer,
 )
-from InvenTree.helpers import extract_serial_numbers, hash_barcode, normalize, str2bool
+from InvenTree.helpers import (
+    current_date,
+    extract_serial_numbers,
+    hash_barcode,
+    normalize,
+    str2bool,
+)
 from InvenTree.serializers import (
     InvenTreeAttachmentSerializer,
     InvenTreeCurrencySerializer,
@@ -375,6 +392,17 @@ class PurchaseOrderLineItemSerializer(InvenTreeModelSerializer):
         - "total_price" = purchase_price * quantity
         - "overdue" status (boolean field)
         """
+        queryset = queryset.prefetch_related(
+            Prefetch(
+                'part__part',
+                queryset=part_models.Part.objects.annotate(
+                    category_default_location=part_filters.annotate_default_location(
+                        'category__'
+                    )
+                ).prefetch_related(None),
+            )
+        )
+
         queryset = queryset.annotate(
             total_price=ExpressionWrapper(
                 F('purchase_price') * F('quantity'), output_field=models.DecimalField()
@@ -1118,11 +1146,12 @@ class SalesOrderShipmentCompleteSerializer(serializers.ModelSerializer):
         user = request.user
 
         # Extract shipping date (defaults to today's date)
-        shipment_date = data.get('shipment_date', datetime.now())
+        now = current_date()
+        shipment_date = data.get('shipment_date', now)
         if shipment_date is None:
             # Shipment date should not be None - check above only
             # checks if shipment_date exists in data
-            shipment_date = datetime.now()
+            shipment_date = now
 
         shipment.complete_shipment(
             user,
@@ -13,6 +13,7 @@ from djmoney.money import Money
 from icalendar import Calendar
 from rest_framework import status
 
+from common.models import InvenTreeSetting
 from common.settings import currency_codes
 from company.models import Company, SupplierPart, SupplierPriceBreak
 from InvenTree.status_codes import (
@@ -27,6 +28,7 @@ from InvenTree.unit_test import InvenTreeAPITestCase
 from order import models
 from part.models import Part
 from stock.models import StockItem
+from users.models import Owner
 
 
 class OrderTest(InvenTreeAPITestCase):
@@ -347,15 +349,35 @@ class PurchaseOrderTest(OrderTest):
         """Test that we can create a new PurchaseOrder via the API."""
         self.assignRole('purchase_order.add')
 
-        self.post(
-            reverse('api-po-list'),
-            {
-                'reference': 'PO-12345678',
-                'supplier': 1,
-                'description': 'A test purchase order',
-            },
-            expected_code=201,
-        )
+        setting = 'PURCHASEORDER_REQUIRE_RESPONSIBLE'
+        url = reverse('api-po-list')
+
+        InvenTreeSetting.set_setting(setting, False)
+
+        data = {
+            'reference': 'PO-12345678',
+            'supplier': 1,
+            'description': 'A test purchase order',
+        }
+
+        self.post(url, data, expected_code=201)
+
+        # Check the 'responsible required' field
+        InvenTreeSetting.set_setting(setting, True)
+
+        data['reference'] = 'PO-12345679'
+        data['responsible'] = None
+
+        response = self.post(url, data, expected_code=400)
+
+        self.assertIn('Responsible user or group must be specified', str(response.data))
+
+        data['responsible'] = Owner.objects.first().pk
+
+        response = self.post(url, data, expected_code=201)
+
+        # Revert the setting to previous value
+        InvenTreeSetting.set_setting(setting, False)
 
     def test_po_creation_date(self):
         """Test that we can create set the creation_date field of PurchaseOrder via the API."""
@@ -10,6 +10,8 @@ from django.utils.translation import gettext_lazy as _
 
 from django_filters import rest_framework as rest_filters
 from django_filters.rest_framework import DjangoFilterBackend
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
 from rest_framework import permissions, serializers, status
 from rest_framework.exceptions import ValidationError
 from rest_framework.response import Response
@@ -214,6 +216,7 @@ class CategoryFilter(rest_filters.FilterSet):
         help_text=_('Exclude sub-categories under the specified category'),
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_exclude_tree(self, queryset, name, value):
         """Exclude all sub-categories under the specified category."""
         # Exclude the specified category
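The `@extend_schema_field(OpenApiTypes.INT)` decorators added in this file follow the usual drf-spectacular pattern: a method-backed field or filter cannot be introspected, so its schema type is pinned explicitly. A minimal generic sketch (not InvenTree code; the serializer and field names are made up):

    from drf_spectacular.types import OpenApiTypes
    from drf_spectacular.utils import extend_schema_field
    from rest_framework import serializers

    class ExampleSerializer(serializers.Serializer):
        part_count = serializers.SerializerMethodField()

        @extend_schema_field(OpenApiTypes.INT)
        def get_part_count(self, obj) -> int:
            # Without the hint, a method-backed field is emitted as a generic
            # string type in the generated OpenAPI schema.
            return getattr(obj, 'part_count', 0)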
@@ -406,7 +409,7 @@ class PartInternalPriceList(ListCreateAPI):
 
 
 class PartAttachmentList(AttachmentMixin, ListCreateDestroyAPIView):
-    """API endpoint for listing (and creating) a PartAttachment (file upload)."""
+    """API endpoint for listing, creating and bulk deleting a PartAttachment (file upload)."""
 
     queryset = PartAttachment.objects.all()
     serializer_class = part_serializers.PartAttachmentSerializer
@@ -1003,6 +1006,7 @@ class PartFilter(rest_filters.FilterSet):
         method='filter_convert_from',
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_convert_from(self, queryset, name, part):
         """Limit the queryset to valid conversion options for the specified part."""
         conversion_options = part.get_conversion_options()
@@ -1017,6 +1021,7 @@ class PartFilter(rest_filters.FilterSet):
         method='filter_exclude_tree',
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_exclude_tree(self, queryset, name, part):
         """Exclude all parts and variants 'down' from the specified part from the queryset."""
         children = part.get_descendants(include_self=True)
@@ -1027,6 +1032,7 @@ class PartFilter(rest_filters.FilterSet):
         label='Ancestor', queryset=Part.objects.all(), method='filter_ancestor'
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_ancestor(self, queryset, name, part):
         """Limit queryset to descendants of the specified ancestor part."""
         descendants = part.get_descendants(include_self=False)
@@ -1044,6 +1050,7 @@ class PartFilter(rest_filters.FilterSet):
         label='In BOM Of', queryset=Part.objects.all(), method='filter_in_bom'
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_in_bom(self, queryset, name, part):
         """Limit queryset to parts in the BOM for the specified part."""
         bom_parts = part.get_parts_in_bom()
@@ -1528,6 +1535,7 @@ class PartParameterTemplateFilter(rest_filters.FilterSet):
         queryset=Part.objects.all(), method='filter_part', label=_('Part')
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_part(self, queryset, name, part):
         """Filter queryset to include only PartParameterTemplates which are referenced by a part."""
         parameters = PartParameter.objects.filter(part=part)
@@ -1541,6 +1549,7 @@ class PartParameterTemplateFilter(rest_filters.FilterSet):
         label=_('Category'),
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_category(self, queryset, name, category):
         """Filter queryset to include only PartParameterTemplates which are referenced by parts in this category."""
         cats = category.get_descendants(include_self=True)
@@ -1828,6 +1837,7 @@ class BomFilter(rest_filters.FilterSet):
         queryset=Part.objects.all(), method='filter_uses', label=_('Uses')
     )
 
+    @extend_schema_field(OpenApiTypes.INT)
     def filter_uses(self, queryset, name, part):
         """Filter the queryset based on the specified part."""
         return queryset.filter(part.get_used_in_bom_item_filter())
@@ -287,6 +287,32 @@ def annotate_category_parts():
     )
 
 
+def annotate_default_location(reference=''):
+    """Construct a queryset that finds the closest default location in the part's category tree.
+
+    If the part's category has its own default_location, this is returned.
+    If not, the category tree is traversed until a value is found.
+    """
+    subquery = part.models.PartCategory.objects.filter(
+        tree_id=OuterRef(f'{reference}tree_id'),
+        lft__lt=OuterRef(f'{reference}lft'),
+        rght__gt=OuterRef(f'{reference}rght'),
+        level__lte=OuterRef(f'{reference}level'),
+        parent__isnull=False,
+    )
+
+    return Coalesce(
+        F(f'{reference}default_location'),
+        Subquery(
+            subquery.order_by('-level')
+            .filter(default_location__isnull=False)
+            .values('default_location')
+        ),
+        Value(None),
+        output_field=IntegerField(),
+    )
+
+
 def annotate_sub_categories():
     """Construct a queryset annotation which returns the number of subcategories for each provided category."""
     subquery = part.models.PartCategory.objects.filter(
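The annotation added above is the set-based form of walking up the category tree for each part; a simplified per-instance sketch of the same idea (illustrative only; the SQL annotation resolves to the location's primary key rather than the object):

    def resolve_default_location(category):
        """Walk up the PartCategory tree until a default_location is found."""
        node = category
        while node is not None:
            if node.default_location is not None:
                return node.default_location
            node = node.parent
        return None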
|
@ -25,7 +25,9 @@ def compile_full_name_template(*args, **kwargs):
|
|||||||
global _part_full_name_template
|
global _part_full_name_template
|
||||||
global _part_full_name_template_string
|
global _part_full_name_template_string
|
||||||
|
|
||||||
template_string = InvenTreeSetting.get_setting('PART_NAME_FORMAT', '')
|
template_string = InvenTreeSetting.get_setting(
|
||||||
|
'PART_NAME_FORMAT', backup_value='', cache=True
|
||||||
|
)
|
||||||
|
|
||||||
# Skip if the template string has not changed
|
# Skip if the template string has not changed
|
||||||
if (
|
if (
|
||||||
|
@ -138,6 +138,8 @@ def update_parameter_values(apps, schema_editor):
|
|||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
atomic = False
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('part', '0108_auto_20230516_1334'),
|
('part', '0108_auto_20230516_1334'),
|
||||||
]
|
]
|
||||||
|
@ -37,6 +37,7 @@ import common.models
|
|||||||
import common.settings
|
import common.settings
|
||||||
import InvenTree.conversion
|
import InvenTree.conversion
|
||||||
import InvenTree.fields
|
import InvenTree.fields
|
||||||
|
import InvenTree.helpers
|
||||||
import InvenTree.models
|
import InvenTree.models
|
||||||
import InvenTree.ready
|
import InvenTree.ready
|
||||||
import InvenTree.tasks
|
import InvenTree.tasks
|
||||||
@ -1728,7 +1729,7 @@ class Part(
|
|||||||
|
|
||||||
self.bom_checksum = self.get_bom_hash()
|
self.bom_checksum = self.get_bom_hash()
|
||||||
self.bom_checked_by = user
|
self.bom_checked_by = user
|
||||||
self.bom_checked_date = datetime.now().date()
|
self.bom_checked_date = InvenTree.helpers.current_date()
|
||||||
|
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@ -2715,7 +2716,7 @@ class PartPricing(common.models.MetaMixin):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if days > 0:
|
if days > 0:
|
||||||
date_threshold = datetime.now().date() - timedelta(days=days)
|
date_threshold = InvenTree.helpers.current_date() - timedelta(days=days)
|
||||||
items = items.filter(updated__gte=date_threshold)
|
items = items.filter(updated__gte=date_threshold)
|
||||||
|
|
||||||
for item in items:
|
for item in items:
|
||||||
@ -3428,6 +3429,13 @@ class PartTestTemplate(InvenTree.models.InvenTreeMetadataModel):
|
|||||||
|
|
||||||
self.key = helpers.generateTestKey(self.test_name)
|
self.key = helpers.generateTestKey(self.test_name)
|
||||||
|
|
||||||
|
if len(self.key) == 0:
|
||||||
|
raise ValidationError({
|
||||||
|
'test_name': _(
|
||||||
|
'Invalid template name - must include at least one alphanumeric character'
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
self.validate_unique()
|
self.validate_unique()
|
||||||
super().clean()
|
super().clean()
|
||||||
|
|
||||||
@ -3445,7 +3453,9 @@ class PartTestTemplate(InvenTree.models.InvenTreeMetadataModel):
|
|||||||
|
|
||||||
if tests.exists():
|
if tests.exists():
|
||||||
raise ValidationError({
|
raise ValidationError({
|
||||||
'test_name': _('Test with this name already exists for this part')
|
'test_name': _(
|
||||||
|
'Test template with the same key already exists for part'
|
||||||
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
super().validate_unique(exclude)
|
super().validate_unique(exclude)
|
||||||
@ -3824,6 +3834,28 @@ class PartCategoryParameterTemplate(InvenTree.models.InvenTreeMetadataModel):
|
|||||||
return f'{self.category.name} | {self.parameter_template.name} | {self.default_value}'
|
return f'{self.category.name} | {self.parameter_template.name} | {self.default_value}'
|
||||||
return f'{self.category.name} | {self.parameter_template.name}'
|
return f'{self.category.name} | {self.parameter_template.name}'
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
"""Validate this PartCategoryParameterTemplate instance.
|
||||||
|
|
||||||
|
Checks the provided 'default_value', and (if not blank), ensure it is valid.
|
||||||
|
"""
|
||||||
|
super().clean()
|
||||||
|
|
||||||
|
self.default_value = (
|
||||||
|
'' if self.default_value is None else str(self.default_value.strip())
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.default_value and InvenTreeSetting.get_setting(
|
||||||
|
'PART_PARAMETER_ENFORCE_UNITS', True, cache=False, create=False
|
||||||
|
):
|
||||||
|
if self.parameter_template.units:
|
||||||
|
try:
|
||||||
|
InvenTree.conversion.convert_physical_value(
|
||||||
|
self.default_value, self.parameter_template.units
|
||||||
|
)
|
||||||
|
except ValidationError as e:
|
||||||
|
raise ValidationError({'default_value': e.message})
|
||||||
|
|
||||||
category = models.ForeignKey(
|
category = models.ForeignKey(
|
||||||
PartCategory,
|
PartCategory,
|
||||||
on_delete=models.CASCADE,
|
on_delete=models.CASCADE,
|
||||||
|
@ -74,12 +74,14 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
|
|||||||
'level',
|
'level',
|
||||||
'parent',
|
'parent',
|
||||||
'part_count',
|
'part_count',
|
||||||
|
'subcategories',
|
||||||
'pathstring',
|
'pathstring',
|
||||||
'path',
|
'path',
|
||||||
'starred',
|
'starred',
|
||||||
'url',
|
'url',
|
||||||
'structural',
|
'structural',
|
||||||
'icon',
|
'icon',
|
||||||
|
'parent_default_location',
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
@ -99,13 +101,22 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
|
|||||||
def annotate_queryset(queryset):
|
def annotate_queryset(queryset):
|
||||||
"""Annotate extra information to the queryset."""
|
"""Annotate extra information to the queryset."""
|
||||||
# Annotate the number of 'parts' which exist in each category (including subcategories!)
|
# Annotate the number of 'parts' which exist in each category (including subcategories!)
|
||||||
queryset = queryset.annotate(part_count=part.filters.annotate_category_parts())
|
queryset = queryset.annotate(
|
||||||
|
part_count=part.filters.annotate_category_parts(),
|
||||||
|
subcategories=part.filters.annotate_sub_categories(),
|
||||||
|
)
|
||||||
|
|
||||||
|
queryset = queryset.annotate(
|
||||||
|
parent_default_location=part.filters.annotate_default_location('parent__')
|
||||||
|
)
|
||||||
|
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
url = serializers.CharField(source='get_absolute_url', read_only=True)
|
url = serializers.CharField(source='get_absolute_url', read_only=True)
|
||||||
|
|
||||||
part_count = serializers.IntegerField(read_only=True)
|
part_count = serializers.IntegerField(read_only=True, label=_('Parts'))
|
||||||
|
|
||||||
|
subcategories = serializers.IntegerField(read_only=True, label=_('Subcategories'))
|
||||||
|
|
||||||
level = serializers.IntegerField(read_only=True)
|
level = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
@ -115,6 +126,8 @@ class CategorySerializer(InvenTree.serializers.InvenTreeModelSerializer):
|
|||||||
child=serializers.DictField(), source='get_path', read_only=True
|
child=serializers.DictField(), source='get_path', read_only=True
|
||||||
)
|
)
|
||||||
|
|
||||||
|
parent_default_location = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
|
|
||||||
class CategoryTree(InvenTree.serializers.InvenTreeModelSerializer):
|
class CategoryTree(InvenTree.serializers.InvenTreeModelSerializer):
|
||||||
"""Serializer for PartCategory tree."""
|
"""Serializer for PartCategory tree."""
|
||||||
@ -277,11 +290,13 @@ class PartBriefSerializer(InvenTree.serializers.InvenTreeModelSerializer):
|
|||||||
'pk',
|
'pk',
|
||||||
'IPN',
|
'IPN',
|
||||||
'barcode_hash',
|
'barcode_hash',
|
||||||
|
'category_default_location',
|
||||||
'default_location',
|
'default_location',
|
||||||
'name',
|
'name',
|
||||||
'revision',
|
'revision',
|
||||||
'full_name',
|
'full_name',
|
||||||
'description',
|
'description',
|
||||||
|
'image',
|
||||||
'thumbnail',
|
'thumbnail',
|
||||||
'active',
|
'active',
|
||||||
'assembly',
|
'assembly',
|
||||||
@ -307,6 +322,9 @@ class PartBriefSerializer(InvenTree.serializers.InvenTreeModelSerializer):
|
|||||||
self.fields.pop('pricing_min')
|
self.fields.pop('pricing_min')
|
||||||
self.fields.pop('pricing_max')
|
self.fields.pop('pricing_max')
|
||||||
|
|
||||||
|
category_default_location = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
|
image = InvenTree.serializers.InvenTreeImageSerializerField(read_only=True)
|
||||||
thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)
|
thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)
|
||||||
|
|
||||||
# Pricing fields
|
# Pricing fields
|
||||||
@ -603,6 +621,7 @@ class PartSerializer(
|
|||||||
'allocated_to_build_orders',
|
'allocated_to_build_orders',
|
||||||
'allocated_to_sales_orders',
|
'allocated_to_sales_orders',
|
||||||
'building',
|
'building',
|
||||||
|
'category_default_location',
|
||||||
'in_stock',
|
'in_stock',
|
||||||
'ordering',
|
'ordering',
|
||||||
'required_for_build_orders',
|
'required_for_build_orders',
|
||||||
@ -758,6 +777,12 @@ class PartSerializer(
|
|||||||
required_for_sales_orders=part.filters.annotate_sales_order_requirements(),
|
required_for_sales_orders=part.filters.annotate_sales_order_requirements(),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
queryset = queryset.annotate(
|
||||||
|
category_default_location=part.filters.annotate_default_location(
|
||||||
|
'category__'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
def get_starred(self, part) -> bool:
|
def get_starred(self, part) -> bool:
|
||||||
@ -797,6 +822,7 @@ class PartSerializer(
|
|||||||
unallocated_stock = serializers.FloatField(
|
unallocated_stock = serializers.FloatField(
|
||||||
read_only=True, label=_('Unallocated Stock')
|
read_only=True, label=_('Unallocated Stock')
|
||||||
)
|
)
|
||||||
|
category_default_location = serializers.IntegerField(read_only=True)
|
||||||
variant_stock = serializers.FloatField(read_only=True, label=_('Variant Stock'))
|
variant_stock = serializers.FloatField(read_only=True, label=_('Variant Stock'))
|
||||||
|
|
||||||
minimum_stock = serializers.FloatField()
|
minimum_stock = serializers.FloatField()
|
||||||
|
@ -266,7 +266,7 @@ def generate_stocktake_report(**kwargs):
|
|||||||
buffer = io.StringIO()
|
buffer = io.StringIO()
|
||||||
buffer.write(dataset.export('csv'))
|
buffer.write(dataset.export('csv'))
|
||||||
|
|
||||||
today = datetime.now().date().isoformat()
|
today = InvenTree.helpers.current_date().isoformat()
|
||||||
filename = f'InvenTree_Stocktake_{today}.csv'
|
filename = f'InvenTree_Stocktake_{today}.csv'
|
||||||
report_file = ContentFile(buffer.getvalue(), name=filename)
|
report_file = ContentFile(buffer.getvalue(), name=filename)
|
||||||
|
|
||||||
|
@ -196,6 +196,11 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
|
|||||||
# Add some more category templates via the API
|
# Add some more category templates via the API
|
||||||
n = PartParameterTemplate.objects.count()
|
n = PartParameterTemplate.objects.count()
|
||||||
|
|
||||||
|
# Ensure validation of parameter values is disabled for these checks
|
||||||
|
InvenTreeSetting.set_setting(
|
||||||
|
'PART_PARAMETER_ENFORCE_UNITS', False, change_user=None
|
||||||
|
)
|
||||||
|
|
||||||
for template in PartParameterTemplate.objects.all():
|
for template in PartParameterTemplate.objects.all():
|
||||||
response = self.post(
|
response = self.post(
|
||||||
url,
|
url,
|
||||||
@ -486,7 +491,7 @@ class PartCategoryAPITest(InvenTreeAPITestCase):
|
|||||||
|
|
||||||
PartCategory.objects.rebuild()
|
PartCategory.objects.rebuild()
|
||||||
|
|
||||||
with self.assertNumQueriesLessThan(10):
|
with self.assertNumQueriesLessThan(12):
|
||||||
response = self.get(reverse('api-part-category-tree'), expected_code=200)
|
response = self.get(reverse('api-part-category-tree'), expected_code=200)
|
||||||
|
|
||||||
self.assertEqual(len(response.data), PartCategory.objects.count())
|
self.assertEqual(len(response.data), PartCategory.objects.count())
|
||||||
|
@ -6,6 +6,7 @@ from django.conf import settings
|
|||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
from django.test.utils import override_settings
|
||||||
|
|
||||||
from allauth.account.models import EmailAddress
|
from allauth.account.models import EmailAddress
|
||||||
|
|
||||||
@@ -63,6 +64,7 @@ class TemplateTagTest(InvenTreeTestCase):
         """Test the 'instance name' setting."""
         self.assertEqual(inventree_extras.inventree_instance_name(), 'InvenTree')
 
+    @override_settings(SITE_URL=None)
     def test_inventree_base_url(self):
         """Test that the base URL tag returns correctly."""
         self.assertEqual(inventree_extras.inventree_base_url(), '')
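The hunk above isolates the base-URL test from any configured site URL. For reference, a minimal sketch of the same override_settings pattern in an ordinary Django test (the test class and assertion are illustrative, not part of the diff):

from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings


class SiteUrlOverrideExample(TestCase):
    """Hypothetical test showing how override_settings scopes a setting change."""

    @override_settings(SITE_URL=None)
    def test_without_site_url(self):
        # Within this test only, SITE_URL reads as None, so code that builds
        # absolute URLs falls back to its "no base URL configured" branch.
        self.assertIsNone(settings.SITE_URL)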
@@ -431,6 +433,29 @@ class TestTemplateTest(TestCase):
 
         self.assertEqual(variant.getTestTemplates().count(), n + 1)
 
+    def test_key_generation(self):
+        """Test the key generation method."""
+        variant = Part.objects.get(pk=10004)
+
+        invalid_names = ['', '+', '+++++++', ' ', '<>$&&&']
+
+        for name in invalid_names:
+            template = PartTestTemplate(part=variant, test_name=name)
+            with self.assertRaises(ValidationError):
+                template.clean()
+
+        valid_names = [
+            'Собранный щит',
+            '!! 123 Собранный щит <><><> $$$$$ !!!',
+            '----hello world----',
+            'Olá Mundo',
+            '我不懂中文',
+        ]
+
+        for name in valid_names:
+            template = PartTestTemplate(part=variant, test_name=name)
+            template.clean()
+
 
 class PartSettingsTest(InvenTreeTestCase):
     """Tests to ensure that the user-configurable default values work as expected.
@@ -466,7 +466,6 @@ plugin_api_urls = [
     # Plugin management
     path('reload/', PluginReload.as_view(), name='api-plugin-reload'),
     path('install/', PluginInstall.as_view(), name='api-plugin-install'),
-    path('activate/', PluginActivate.as_view(), name='api-plugin-activate'),
     # Registry status
     path(
         'status/',
@@ -10,7 +10,7 @@ from django.apps import AppConfig
 
 from maintenance_mode.core import set_maintenance_mode
 
-from InvenTree.ready import canAppAccessDatabase, isInMainThread
+from InvenTree.ready import canAppAccessDatabase, isInMainThread, isInWorkerThread
 from plugin import registry
 
 logger = logging.getLogger('inventree')
@@ -24,7 +24,8 @@ class PluginAppConfig(AppConfig):
     def ready(self):
         """The ready method is extended to initialize plugins."""
         # skip loading if we run in a background thread
-        if not isInMainThread():
+        if not isInMainThread() and not isInWorkerThread():
             return
 
         if not canAppAccessDatabase(
@@ -117,7 +117,7 @@ def allow_table_event(table_name):
         return False  # pragma: no cover
 
     # Prevent table events when in testing mode (saves a lot of time)
-    if settings.TESTING:
+    if settings.TESTING and not settings.TESTING_TABLE_EVENTS:
         return False
 
     table_name = table_name.lower().strip()
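The extra settings.TESTING_TABLE_EVENTS check above gives the test suite a way to opt back in to table events. A minimal sketch of that opt-in, assuming TESTING_TABLE_EVENTS is a project-level settings flag (the test class is illustrative):

from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings


class TableEventOptInExample(TestCase):
    """Hypothetical test that re-enables table events during testing."""

    @override_settings(TESTING_TABLE_EVENTS=True)
    def test_table_events_enabled(self):
        # With the flag set, allow_table_event() no longer short-circuits on
        # settings.TESTING, so plugin table events fire as they would in production.
        self.assertTrue(settings.TESTING_TABLE_EVENTS)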
@@ -112,7 +112,7 @@ class ScheduleMixin:
     @property
     def has_scheduled_tasks(self):
         """Are tasks defined for this plugin."""
-        return bool(self.scheduled_tasks)
+        return bool(self.get_scheduled_tasks())
 
     def validate_scheduled_tasks(self):
         """Check that the provided scheduled tasks are valid."""
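Because has_scheduled_tasks now calls get_scheduled_tasks(), a plugin can assemble its schedule at runtime instead of only declaring a static scheduled_tasks dict. A minimal standalone sketch of that idea; the class, task names and dict layout are illustrative, not taken from the plugin API:

class ExampleScheduledPlugin:
    """Toy plugin-like class with a dynamically built schedule."""

    # Static declaration left empty on purpose
    scheduled_tasks = {}

    def get_scheduled_tasks(self):
        # Build the schedule at call time, e.g. from configuration
        return {
            'daily_cleanup': {
                'func': 'example_plugin.tasks.cleanup',  # hypothetical task
                'schedule': 'D',
            }
        }

    @property
    def has_scheduled_tasks(self):
        # Mirrors the change above: dynamic tasks now count as "has tasks"
        return bool(self.get_scheduled_tasks())


print(ExampleScheduledPlugin().has_scheduled_tasks)  # True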
@@ -138,7 +138,13 @@ class MixinBase:
         if fnc_name is True:
             return True
 
-        return getattr(self, fnc_name, True)
+        attr = getattr(self, fnc_name, True)
+
+        if callable(attr):
+            return attr()
+        else:
+            return attr
+
         return False
 
     def add_mixin(self, key: str, fnc_enabled=True, cls=None):
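The mixin enable-flag lookup above now accepts either a plain attribute or a callable. A standalone reproduction of just that dispatch logic (a toy class, not the real MixinBase):

class EnableFlagDemo:
    """Toy class reproducing the attribute-or-callable check from the hunk above."""

    plain_flag = False

    def dynamic_flag(self):
        # Decided at call time, e.g. from a setting or an environment probe
        return True

    def mixin_enabled(self, fnc_name):
        attr = getattr(self, fnc_name, True)

        if callable(attr):
            return attr()
        return attr


demo = EnableFlagDemo()
print(demo.mixin_enabled('plain_flag'))    # False - static attribute
print(demo.mixin_enabled('dynamic_flag'))  # True  - callable is evaluated
print(demo.mixin_enabled('missing_flag'))  # True  - default when undefined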
@@ -90,12 +90,19 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
     def test_plugin_activate(self):
         """Test the plugin activate."""
         test_plg = self.plugin_confs.first()
+        assert test_plg is not None
 
         def assert_plugin_active(self, active):
-            self.assertEqual(PluginConfig.objects.all().first().active, active)
+            plgs = PluginConfig.objects.all().first()
+            assert plgs is not None
+            self.assertEqual(plgs.active, active)
 
         # Should not work - not a superuser
-        response = self.client.post(reverse('api-plugin-activate'), {}, follow=True)
+        response = self.client.post(
+            reverse('api-plugin-detail-activate', kwargs={'pk': test_plg.pk}),
+            {},
+            follow=True,
+        )
         self.assertEqual(response.status_code, 403)
 
         # Make user superuser
|
|||||||
# Activate plugin with detail url
|
# Activate plugin with detail url
|
||||||
assert_plugin_active(self, False)
|
assert_plugin_active(self, False)
|
||||||
response = self.client.patch(
|
response = self.client.patch(
|
||||||
reverse('api-plugin-detail-activate', kwargs={'pk': test_plg.id}),
|
reverse('api-plugin-detail-activate', kwargs={'pk': test_plg.pk}),
|
||||||
{},
|
{},
|
||||||
follow=True,
|
follow=True,
|
||||||
)
|
)
|
||||||
@ -123,7 +130,9 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
# Activate plugin
|
# Activate plugin
|
||||||
assert_plugin_active(self, False)
|
assert_plugin_active(self, False)
|
||||||
response = self.client.patch(
|
response = self.client.patch(
|
||||||
reverse('api-plugin-activate'), {'pk': test_plg.pk}, follow=True
|
reverse('api-plugin-detail-activate', kwargs={'pk': test_plg.pk}),
|
||||||
|
{},
|
||||||
|
follow=True,
|
||||||
)
|
)
|
||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
assert_plugin_active(self, True)
|
assert_plugin_active(self, True)
|
||||||
@ -133,6 +142,8 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
url = reverse('admin:plugin_pluginconfig_changelist')
|
url = reverse('admin:plugin_pluginconfig_changelist')
|
||||||
|
|
||||||
test_plg = self.plugin_confs.first()
|
test_plg = self.plugin_confs.first()
|
||||||
|
assert test_plg is not None
|
||||||
|
|
||||||
# deactivate plugin
|
# deactivate plugin
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
url,
|
url,
|
||||||
@ -181,6 +192,8 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
"""Test the PluginConfig model."""
|
"""Test the PluginConfig model."""
|
||||||
# check mixin registry
|
# check mixin registry
|
||||||
plg = self.plugin_confs.first()
|
plg = self.plugin_confs.first()
|
||||||
|
assert plg is not None
|
||||||
|
|
||||||
mixin_dict = plg.mixins()
|
mixin_dict = plg.mixins()
|
||||||
self.assertIn('base', mixin_dict)
|
self.assertIn('base', mixin_dict)
|
||||||
self.assertDictContainsSubset(
|
self.assertDictContainsSubset(
|
||||||
@ -190,6 +203,8 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
# check reload on save
|
# check reload on save
|
||||||
with self.assertWarns(Warning) as cm:
|
with self.assertWarns(Warning) as cm:
|
||||||
plg_inactive = self.plugin_confs.filter(active=False).first()
|
plg_inactive = self.plugin_confs.filter(active=False).first()
|
||||||
|
assert plg_inactive is not None
|
||||||
|
|
||||||
plg_inactive.active = True
|
plg_inactive.active = True
|
||||||
plg_inactive.save()
|
plg_inactive.save()
|
||||||
self.assertEqual(cm.warning.args[0], 'A reload was triggered')
|
self.assertEqual(cm.warning.args[0], 'A reload was triggered')
|
||||||
@ -208,7 +223,7 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
|
|
||||||
# Wrong with pk
|
# Wrong with pk
|
||||||
with self.assertRaises(NotFound) as exc:
|
with self.assertRaises(NotFound) as exc:
|
||||||
check_plugin(plugin_slug=None, plugin_pk='123')
|
check_plugin(plugin_slug=None, plugin_pk=123)
|
||||||
self.assertEqual(str(exc.exception.detail), "Plugin '123' not installed")
|
self.assertEqual(str(exc.exception.detail), "Plugin '123' not installed")
|
||||||
|
|
||||||
def test_plugin_settings(self):
|
def test_plugin_settings(self):
|
||||||
@ -219,6 +234,8 @@ class PluginDetailAPITest(PluginMixin, InvenTreeAPITestCase):
|
|||||||
|
|
||||||
# Activate the 'sample' plugin via the API
|
# Activate the 'sample' plugin via the API
|
||||||
cfg = PluginConfig.objects.filter(key='sample').first()
|
cfg = PluginConfig.objects.filter(key='sample').first()
|
||||||
|
assert cfg is not None
|
||||||
|
|
||||||
url = reverse('api-plugin-detail-activate', kwargs={'pk': cfg.pk})
|
url = reverse('api-plugin-detail-activate', kwargs={'pk': cfg.pk})
|
||||||
self.client.patch(url, {}, expected_code=200)
|
self.client.patch(url, {}, expected_code=200)
|
||||||
|
|
||||||
|
@@ -17,31 +17,14 @@ import common.models
 import InvenTree.helpers
 import order.models
 import part.models
+import report.models
+import report.serializers
 from InvenTree.api import MetadataView
 from InvenTree.exceptions import log_error
 from InvenTree.filters import InvenTreeSearchFilter
 from InvenTree.mixins import ListCreateAPI, RetrieveAPI, RetrieveUpdateDestroyAPI
 from stock.models import StockItem, StockItemAttachment, StockLocation
 
-from .models import (
-    BillOfMaterialsReport,
-    BuildReport,
-    PurchaseOrderReport,
-    ReturnOrderReport,
-    SalesOrderReport,
-    StockLocationReport,
-    TestReport,
-)
-from .serializers import (
-    BOMReportSerializer,
-    BuildReportSerializer,
-    PurchaseOrderReportSerializer,
-    ReturnOrderReportSerializer,
-    SalesOrderReportSerializer,
-    StockLocationReportSerializer,
-    TestReportSerializer,
-)
-
 
 class ReportListView(ListCreateAPI):
     """Generic API class for report templates."""
@@ -264,6 +247,11 @@ class ReportPrintMixin:
 
         except Exception as exc:
             # Log the exception to the database
-            log_error(request.path)
+            if InvenTree.helpers.str2bool(
+                common.models.InvenTreeSetting.get_setting(
+                    'REPORT_LOG_ERRORS', cache=False
+                )
+            ):
+                log_error(request.path)
 
             # Re-throw the exception to the client as a DRF exception
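The hunk above gates report error logging behind the REPORT_LOG_ERRORS setting. A self-contained sketch of the same gate with stand-in helpers; only the setting name and the str2bool(get_setting(...)) call shape come from the diff:

def str2bool(value):
    """Stand-in for InvenTree.helpers.str2bool."""
    return str(value).strip().lower() in ('1', 'true', 'yes', 'on')


def get_setting(key, cache=False):
    """Stand-in for common.models.InvenTreeSetting.get_setting."""
    demo_settings = {'REPORT_LOG_ERRORS': 'True'}
    return demo_settings.get(key, 'False')


def log_error(path):
    print(f'error logged for {path}')


def handle_report_exception(request_path):
    # Mirrors the added check: only persist the error when the setting is enabled
    if str2bool(get_setting('REPORT_LOG_ERRORS', cache=False)):
        log_error(request_path)


handle_report_exception('/api/report/test/1/print/')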
@@ -287,8 +275,8 @@ class StockItemTestReportMixin(ReportFilterMixin):
 
     ITEM_MODEL = StockItem
     ITEM_KEY = 'item'
-    queryset = TestReport.objects.all()
-    serializer_class = TestReportSerializer
+    queryset = report.models.TestReport.objects.all()
+    serializer_class = report.serializers.TestReportSerializer
 
 
 class StockItemTestReportList(StockItemTestReportMixin, ReportListView):
@@ -338,8 +326,8 @@ class BOMReportMixin(ReportFilterMixin):
     ITEM_MODEL = part.models.Part
     ITEM_KEY = 'part'
 
-    queryset = BillOfMaterialsReport.objects.all()
-    serializer_class = BOMReportSerializer
+    queryset = report.models.BillOfMaterialsReport.objects.all()
+    serializer_class = report.serializers.BOMReportSerializer
 
 
 class BOMReportList(BOMReportMixin, ReportListView):
@@ -372,8 +360,8 @@ class BuildReportMixin(ReportFilterMixin):
     ITEM_MODEL = build.models.Build
     ITEM_KEY = 'build'
 
-    queryset = BuildReport.objects.all()
-    serializer_class = BuildReportSerializer
+    queryset = report.models.BuildReport.objects.all()
+    serializer_class = report.serializers.BuildReportSerializer
 
 
 class BuildReportList(BuildReportMixin, ReportListView):
@@ -406,8 +394,8 @@ class PurchaseOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.PurchaseOrder
     ITEM_KEY = 'order'
 
-    queryset = PurchaseOrderReport.objects.all()
-    serializer_class = PurchaseOrderReportSerializer
+    queryset = report.models.PurchaseOrderReport.objects.all()
+    serializer_class = report.serializers.PurchaseOrderReportSerializer
 
 
 class PurchaseOrderReportList(PurchaseOrderReportMixin, ReportListView):
@@ -434,8 +422,8 @@ class SalesOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.SalesOrder
     ITEM_KEY = 'order'
 
-    queryset = SalesOrderReport.objects.all()
-    serializer_class = SalesOrderReportSerializer
+    queryset = report.models.SalesOrderReport.objects.all()
+    serializer_class = report.serializers.SalesOrderReportSerializer
 
 
 class SalesOrderReportList(SalesOrderReportMixin, ReportListView):
@@ -462,8 +450,8 @@ class ReturnOrderReportMixin(ReportFilterMixin):
     ITEM_MODEL = order.models.ReturnOrder
     ITEM_KEY = 'order'
 
-    queryset = ReturnOrderReport.objects.all()
-    serializer_class = ReturnOrderReportSerializer
+    queryset = report.models.ReturnOrderReport.objects.all()
+    serializer_class = report.serializers.ReturnOrderReportSerializer
 
 
 class ReturnOrderReportList(ReturnOrderReportMixin, ReportListView):
@@ -489,8 +477,8 @@ class StockLocationReportMixin(ReportFilterMixin):
 
     ITEM_MODEL = StockLocation
     ITEM_KEY = 'location'
-    queryset = StockLocationReport.objects.all()
-    serializer_class = StockLocationReportSerializer
+    queryset = report.models.StockLocationReport.objects.all()
+    serializer_class = report.serializers.StockLocationReportSerializer
 
 
 class StockLocationReportList(StockLocationReportMixin, ReportListView):
@@ -511,7 +499,57 @@ class StockLocationReportPrint(StockLocationReportMixin, ReportPrintMixin, Retri
     pass
 
 
+class ReportSnippetList(ListCreateAPI):
+    """API endpoint for listing ReportSnippet objects."""
+
+    queryset = report.models.ReportSnippet.objects.all()
+    serializer_class = report.serializers.ReportSnippetSerializer
+
+
+class ReportSnippetDetail(RetrieveUpdateDestroyAPI):
+    """API endpoint for a single ReportSnippet object."""
+
+    queryset = report.models.ReportSnippet.objects.all()
+    serializer_class = report.serializers.ReportSnippetSerializer
+
+
+class ReportAssetList(ListCreateAPI):
+    """API endpoint for listing ReportAsset objects."""
+
+    queryset = report.models.ReportAsset.objects.all()
+    serializer_class = report.serializers.ReportAssetSerializer
+
+
+class ReportAssetDetail(RetrieveUpdateDestroyAPI):
+    """API endpoint for a single ReportAsset object."""
+
+    queryset = report.models.ReportAsset.objects.all()
+    serializer_class = report.serializers.ReportAssetSerializer
+
+
 report_api_urls = [
+    # Report assets
+    path(
+        'asset/',
+        include([
+            path(
+                '<int:pk>/', ReportAssetDetail.as_view(), name='api-report-asset-detail'
+            ),
+            path('', ReportAssetList.as_view(), name='api-report-asset-list'),
+        ]),
+    ),
+    # Report snippets
+    path(
+        'snippet/',
+        include([
+            path(
+                '<int:pk>/',
+                ReportSnippetDetail.as_view(),
+                name='api-report-snippet-detail',
+            ),
+            path('', ReportSnippetList.as_view(), name='api-report-snippet-list'),
+        ]),
+    ),
     # Purchase order reports
     path(
         'po/',
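The hunk above adds list and detail endpoints for report assets and snippets. A minimal sketch of uploading a snippet through the new list endpoint, assuming an authenticated DRF APIClient; the URL name and the snippet/description fields appear in this diff, while the file contents and client setup are illustrative:

import io

from django.urls import reverse


def upload_example_snippet(client):
    """POST a small template fragment to the snippet list endpoint (sketch)."""
    url = reverse('api-report-snippet-list')

    snippet_file = io.BytesIO(b'{% block content %}Hello{% endblock %}')
    snippet_file.name = 'example_snippet.html'

    response = client.post(
        url,
        {'snippet': snippet_file, 'description': 'Example snippet'},
        format='multipart',
    )
    assert response.status_code == 201
    return response.data['pk']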
@@ -528,7 +566,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'model': PurchaseOrderReport},
+                {'model': report.models.PurchaseOrderReport},
                 name='api-po-report-metadata',
             ),
             path(
@@ -558,7 +596,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'model': SalesOrderReport},
+                {'model': report.models.SalesOrderReport},
                 name='api-so-report-metadata',
             ),
             path(
@@ -586,7 +624,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'model': ReturnOrderReport},
+                {'model': report.models.ReturnOrderReport},
                 name='api-so-report-metadata',
             ),
             path(
@@ -617,7 +655,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'model': BuildReport},
+                {'model': report.models.BuildReport},
                 name='api-build-report-metadata',
             ),
             path(
@@ -645,7 +683,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'model': BillOfMaterialsReport},
+                {'model': report.models.BillOfMaterialsReport},
                 name='api-bom-report-metadata',
             ),
             path('', BOMReportDetail.as_view(), name='api-bom-report-detail'),
@@ -671,7 +709,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'report': TestReport},
+                {'report': report.models.TestReport},
                 name='api-stockitem-testreport-metadata',
             ),
             path(
@@ -705,7 +743,7 @@ report_api_urls = [
             path(
                 'metadata/',
                 MetadataView.as_view(),
-                {'report': StockLocationReport},
+                {'report': report.models.StockLocationReport},
                 name='api-stocklocation-report-metadata',
             ),
             path(
@@ -7,6 +7,7 @@ import sys
 
 from django.conf import settings
 from django.core.cache import cache
+from django.core.exceptions import ValidationError
 from django.core.validators import FileExtensionValidator
 from django.db import models
 from django.template import Context, Template
@@ -17,6 +18,7 @@ from django.utils.translation import gettext_lazy as _
 import build.models
 import common.models
 import InvenTree.exceptions
+import InvenTree.helpers
 import InvenTree.models
 import order.models
 import part.models
@@ -249,8 +251,8 @@ class ReportTemplateBase(MetadataMixin, ReportBase):
         context = self.get_context_data(request)
 
         context['base_url'] = get_base_url(request=request)
-        context['date'] = datetime.datetime.now().date()
-        context['datetime'] = datetime.datetime.now()
+        context['date'] = InvenTree.helpers.current_date()
+        context['datetime'] = InvenTree.helpers.current_time()
         context['page_size'] = self.get_report_size()
         context['report_template'] = self
         context['report_description'] = self.description
@@ -585,10 +587,7 @@ class ReturnOrderReport(ReportTemplateBase):
 
 def rename_snippet(instance, filename):
     """Function to rename a report snippet once uploaded."""
-    filename = os.path.basename(filename)
-
-    path = os.path.join('report', 'snippets', filename)
-
+    path = ReportSnippet.snippet_path(filename)
     fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
 
     # If the snippet file is the *same* filename as the one being uploaded,
@@ -610,6 +609,40 @@ class ReportSnippet(models.Model):
     Useful for 'common' template actions, sub-templates, etc
     """
 
+    def __str__(self) -> str:
+        """String representation of a ReportSnippet instance."""
+        return f'snippets/{self.filename}'
+
+    @property
+    def filename(self):
+        """Return the filename of the asset."""
+        path = self.snippet.name
+        if path:
+            return os.path.basename(path)
+        else:
+            return '-'
+
+    @staticmethod
+    def snippet_path(filename):
+        """Return the fully-qualified snippet path for the given filename."""
+        return os.path.join('report', 'snippets', os.path.basename(str(filename)))
+
+    def validate_unique(self, exclude=None):
+        """Validate that this report asset is unique."""
+        proposed_path = self.snippet_path(self.snippet)
+
+        if (
+            ReportSnippet.objects.filter(snippet=proposed_path)
+            .exclude(pk=self.pk)
+            .count()
+            > 0
+        ):
+            raise ValidationError({
+                'snippet': _('Snippet file with this name already exists')
+            })
+
+        return super().validate_unique(exclude)
+
     snippet = models.FileField(
         upload_to=rename_snippet,
         verbose_name=_('Snippet'),
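snippet_path() and validate_unique() above pin every snippet to a single media location and reject duplicate filenames. A short sketch of what that implies for callers; the import path is assumed:

from report.models import ReportSnippet  # import path assumed

# snippet_path() keeps only the basename and anchors it under report/snippets/
print(ReportSnippet.snippet_path('header.html'))
# e.g. 'report/snippets/header.html'
print(ReportSnippet.snippet_path('/tmp/uploads/header.html'))
# e.g. 'report/snippets/header.html' - directory components are discarded

# validate_unique() then raises ValidationError({'snippet': ...}) during
# full_clean() if another ReportSnippet already resolves to that same path.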
@@ -626,19 +659,20 @@ class ReportSnippet(models.Model):
 
 def rename_asset(instance, filename):
     """Function to rename an asset file when uploaded."""
-    filename = os.path.basename(filename)
-
-    path = os.path.join('report', 'assets', filename)
+    path = ReportAsset.asset_path(filename)
+    fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
 
     # If the asset file is the *same* filename as the one being uploaded,
     # delete the original one from the media directory
     if str(filename) == str(instance.asset):
-        fullpath = settings.MEDIA_ROOT.joinpath(path).resolve()
-
         if fullpath.exists():
+            # Check for existing asset file with the same name
            logger.info("Deleting existing asset file: '%s'", filename)
             os.remove(fullpath)
 
+    # Ensure the cache is deleted for this asset
+    cache.delete(fullpath)
+
     return path
 
 
@@ -652,7 +686,35 @@ class ReportAsset(models.Model):
 
     def __str__(self):
         """String representation of a ReportAsset instance."""
-        return os.path.basename(self.asset.name)
+        return f'assets/{self.filename}'
+
+    @property
+    def filename(self):
+        """Return the filename of the asset."""
+        path = self.asset.name
+        if path:
+            return os.path.basename(path)
+        else:
+            return '-'
+
+    @staticmethod
+    def asset_path(filename):
+        """Return the fully-qualified asset path for the given filename."""
+        return os.path.join('report', 'assets', os.path.basename(str(filename)))
+
+    def validate_unique(self, exclude=None):
+        """Validate that this report asset is unique."""
+        proposed_path = self.asset_path(self.asset)
+
+        if (
+            ReportAsset.objects.filter(asset=proposed_path).exclude(pk=self.pk).count()
+            > 0
+        ):
+            raise ValidationError({
+                'asset': _('Asset file with this name already exists')
+            })
+
+        return super().validate_unique(exclude)
 
     # Asset file
     asset = models.FileField(
@@ -1,20 +1,13 @@
 """API serializers for the reporting models."""
 
+from rest_framework import serializers
+
+import report.models
 from InvenTree.serializers import (
     InvenTreeAttachmentSerializerField,
     InvenTreeModelSerializer,
 )
 
-from .models import (
-    BillOfMaterialsReport,
-    BuildReport,
-    PurchaseOrderReport,
-    ReturnOrderReport,
-    SalesOrderReport,
-    StockLocationReport,
-    TestReport,
-)
-
 
 class ReportSerializerBase(InvenTreeModelSerializer):
     """Base class for report serializer."""
@@ -24,7 +17,16 @@ class ReportSerializerBase(InvenTreeModelSerializer):
     @staticmethod
     def report_fields():
         """Generic serializer fields for a report template."""
-        return ['pk', 'name', 'description', 'template', 'filters', 'enabled']
+        return [
+            'pk',
+            'name',
+            'description',
+            'template',
+            'filters',
+            'page_size',
+            'landscape',
+            'enabled',
+        ]
 
 
 class TestReportSerializer(ReportSerializerBase):
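report_fields() now includes page_size and landscape, so every report template serializer exposes its page layout. A tiny sketch of what client code can rely on after this change; only the field list itself comes from the diff, the import path is assumed:

from report.serializers import ReportSerializerBase  # import path assumed

fields = ReportSerializerBase.report_fields()

# Page layout is part of the serialized representation, so API clients can
# read or update it alongside the other template attributes.
assert 'page_size' in fields
assert 'landscape' in fields
print(fields)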
@@ -33,7 +35,7 @@ class TestReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = TestReport
+        model = report.models.TestReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -43,7 +45,7 @@ class BuildReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = BuildReport
+        model = report.models.BuildReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -53,7 +55,7 @@ class BOMReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = BillOfMaterialsReport
+        model = report.models.BillOfMaterialsReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -63,7 +65,7 @@ class PurchaseOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = PurchaseOrderReport
+        model = report.models.PurchaseOrderReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -73,7 +75,7 @@ class SalesOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = SalesOrderReport
+        model = report.models.SalesOrderReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -83,7 +85,7 @@ class ReturnOrderReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = ReturnOrderReport
+        model = report.models.ReturnOrderReport
         fields = ReportSerializerBase.report_fields()
 
 
@@ -93,5 +95,30 @@ class StockLocationReportSerializer(ReportSerializerBase):
     class Meta:
         """Metaclass options."""
 
-        model = StockLocationReport
+        model = report.models.StockLocationReport
         fields = ReportSerializerBase.report_fields()
+
+
+class ReportSnippetSerializer(InvenTreeModelSerializer):
+    """Serializer class for the ReportSnippet model."""
+
+    class Meta:
+        """Metaclass options."""
+
+        model = report.models.ReportSnippet
+
+        fields = ['pk', 'snippet', 'description']
+
+    snippet = InvenTreeAttachmentSerializerField()
+
+
+class ReportAssetSerializer(InvenTreeModelSerializer):
+    """Serializer class for the ReportAsset model."""
+
+    class Meta:
+        """Meta class options."""
+
+        model = report.models.ReportAsset
+        fields = ['pk', 'asset', 'description']
+
+    asset = InvenTreeAttachmentSerializerField()