Merge branch 'master' into pr/ChristianSchindler/6305
This commit is contained in: commit 6d2270103f
.github/actions/setup/action.yaml (2 changes)

@@ -98,4 +98,4 @@ runs:
     - name: Run invoke update
       if: ${{ inputs.update == 'true' }}
       shell: bash
-      run: invoke update --uv
+      run: invoke update --uv --skip-backup --skip-static
.github/scripts/version_check.py (3 changes)

@@ -97,6 +97,9 @@ if __name__ == '__main__':
     )
     text = version_file.read_text()
     results = re.findall(r"""INVENTREE_API_VERSION = (.*)""", text)
+    # If the second argument is 'true', lower the version number by 1
+    if len(sys.argv) > 2 and sys.argv[2] == 'true':
+        results[0] = str(int(results[0]) - 1)
     print(results[0])
     exit(0)
 # GITHUB_REF_TYPE may be either 'branch' or 'tag'
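For context, a minimal sketch of the decrement behaviour added above (the file contents are inlined here as a stand-in; the real script reads them from the version file):

```python
import re
import sys

text = "INVENTREE_API_VERSION = 225"  # stand-in for version_file.read_text()
results = re.findall(r"INVENTREE_API_VERSION = (.*)", text)

# When the second CLI argument is 'true', report the *previous* API version,
# so a workflow can fetch the last published schema for diffing.
if len(sys.argv) > 2 and sys.argv[2] == 'true':
    results[0] = str(int(results[0]) - 1)

print(results[0])  # prints "224" when invoked as: version_check.py only_version true
```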
.github/workflows/docker.yaml (8 changes)

@@ -68,7 +68,7 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
       - name: Set Up Python ${{ env.python_version }}
-        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
+        uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
         with:
           python-version: ${{ env.python_version }}
       - name: Version Check

@@ -124,10 +124,10 @@ jobs:
           rm -rf InvenTree/_testfolder
       - name: Set up QEMU
         if: github.event_name != 'pull_request'
-        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # pin@v3.0.0
+        uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee # pin@v3.1.0
       - name: Set up Docker Buildx
         if: github.event_name != 'pull_request'
-        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # pin@v3.3.0
+        uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4 # pin@v3.4.0
       - name: Set up cosign
         if: github.event_name != 'pull_request'
         uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # pin@v3.5.0

@@ -166,7 +166,7 @@ jobs:
       - name: Push Docker Images
         id: push-docker
         if: github.event_name != 'pull_request'
-        uses: docker/build-push-action@31159d49c0d4756269a0940a750801a1ea5d7003 # pin@v6.1.0
+        uses: docker/build-push-action@a254f8ca60a858f3136a2f1f23a60969f2c402dd # pin@v6.4.0
         with:
           context: .
           file: ./contrib/container/Dockerfile
.github/workflows/qc_checks.yaml (37 changes)

@@ -94,7 +94,7 @@ jobs:
     steps:
       - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
       - name: Set up Python ${{ env.python_version }}
-        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
+        uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
         with:
           python-version: ${{ env.python_version }}
           cache: "pip"

@@ -115,7 +115,7 @@ jobs:
       - name: Checkout Code
         uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
       - name: Set up Python ${{ env.python_version }}
-        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # pin@v5.1.0
+        uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
         with:
           python-version: ${{ env.python_version }}
       - name: Check Config

@@ -159,20 +159,32 @@ jobs:
       - name: Export API Documentation
         run: invoke schema --ignore-warnings --filename src/backend/InvenTree/schema.yml
       - name: Upload schema
-        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # pin@v4.3.3
+        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # pin@v4.3.4
         with:
           name: schema.yml
           path: src/backend/InvenTree/schema.yml
       - name: Download public schema
         if: needs.paths-filter.outputs.api == 'false'
         run: |
           pip install --require-hashes -r contrib/dev_reqs/requirements.txt >/dev/null 2>&1
-          version="$(python3 .github/scripts/version_check.py only_version 2>&1)"
+          version="$(python3 .github/scripts/version_check.py only_version ${{ needs.paths-filter.outputs.api }} 2>&1)"
           echo "Version: $version"
           url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
           echo "URL: $url"
-          curl -s -o api.yaml $url
+          code=$(curl -s -o api.yaml $url --write-out '%{http_code}' --silent)
+          if [ "$code" != "200" ]; then
+            exit 1
+          fi
           echo "Downloaded api.yaml"
+      - name: Running OpenAPI Spec diff action
+        id: breaking_changes
+        uses: oasdiff/oasdiff-action/diff@main
+        with:
+          base: 'api.yaml'
+          revision: 'src/backend/InvenTree/schema.yml'
+          format: 'html'
+      - name: Echoing diff to step
+        run: echo "${{ steps.breaking_changes.outputs.diff }}" >> $GITHUB_STEP_SUMMARY

       - name: Check for differences in API Schema
         if: needs.paths-filter.outputs.api == 'false'
         run: |

@@ -200,11 +212,12 @@ jobs:

     steps:
       - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
        name: Checkout Code
        with:
          repository: inventree/schema
          token: ${{ secrets.SCHEMA_PAT }}
      - name: Download schema artifact
-       uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
+       uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: schema.yml
      - name: Move schema to correct location

@@ -213,8 +226,9 @@ jobs:
          mkdir export/${version}
          mv schema.yml export/${version}/api.yaml
      - uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1
        name: Commit schema changes
        with:
-         commit_message: "Update API schema for ${version}"
+         commit_message: "Update API schema for ${{ env.version }} / ${{ github.sha }}"

  python:
    name: Tests - inventree-python

@@ -267,7 +281,8 @@ jobs:
    continue-on-error: true # continue if a step fails so that coverage gets pushed
    strategy:
      matrix:
-       python_version: [3.9, 3.12]
+       python_version: [3.9]
+       # python_version: [3.9, 3.12] # Disabled due to requirement issues

    env:
      INVENTREE_DB_NAME: ./inventree.sqlite

@@ -520,7 +535,7 @@ jobs:
      - name: Run Playwright tests
        id: tests
        run: cd src/frontend && npx nyc playwright test
-     - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # pin@v4
+     - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # pin@v4
        if: ${{ !cancelled() && steps.tests.outcome == 'failure' }}
        with:
          name: playwright-report

@@ -556,7 +571,7 @@ jobs:
        run: |
          cd src/backend/InvenTree/web/static
          zip -r frontend-build.zip web/ web/.vite
-     - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # pin@v4.3.3
+     - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # pin@v4.3.4
        with:
          name: frontend-build
          path: src/backend/InvenTree/web/static/web
.github/workflows/scorecard.yaml (4 changes)

@@ -59,7 +59,7 @@ jobs:
      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
-       uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
+       uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
        with:
          name: SARIF file
          path: results.sarif

@@ -67,6 +67,6 @@ jobs:

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
-       uses: github/codeql-action/upload-sarif@23acc5c183826b7a8a97bce3cecc52db901f8251 # v3.25.10
+       uses: github/codeql-action/upload-sarif@4fa2a7953630fd2f3fb380f21be14ede0169dd4f # v3.25.12
        with:
          sarif_file: results.sarif
@@ -14,6 +14,7 @@ env:
   - INVENTREE_BACKUP_DIR=/opt/inventree/backup
   - INVENTREE_PLUGIN_FILE=/opt/inventree/plugins.txt
   - INVENTREE_CONFIG_FILE=/opt/inventree/config.yaml
+before_install: contrib/packager.io/preinstall.sh
 after_install: contrib/packager.io/postinstall.sh
 before:
   - contrib/packager.io/before.sh
@@ -66,7 +66,7 @@ InvenTree is designed to be **extensible**, and provides multiple options for **
   <li><a href="https://www.djangoproject.com/">Django</a></li>
   <li><a href="https://www.django-rest-framework.org/">DRF</a></li>
   <li><a href="https://django-q.readthedocs.io/">Django Q</a></li>
-  <li><a href="https://django-allauth.readthedocs.io/">Django-Allauth</a></li>
+  <li><a href="https://docs.allauth.org/">Django-Allauth</a></li>
 </ul>
 </details>
@@ -4,9 +4,9 @@ asgiref==3.8.1 \
     --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \
     --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590
     # via django
-django==4.2.11 \
-    --hash=sha256:6e6ff3db2d8dd0c986b4eec8554c8e4f919b5c1ff62a5b4390c17aff2ed6e5c4 \
-    --hash=sha256:ddc24a0a8280a0430baa37aff11f28574720af05888c62b7cfe71d219f4599d3
+django==4.2.14 \
+    --hash=sha256:3ec32bc2c616ab02834b9cac93143a7dc1cdcd5b822d78ac95fc20a38c534240 \
+    --hash=sha256:fc6919875a6226c7ffcae1a7d51e0f2ceaf6f160393180818f6c95f51b1e7b96
     # via django-auth-ldap
 django-auth-ldap==4.8.0 \
     --hash=sha256:4b4b944f3c28bce362f33fb6e8db68429ed8fd8f12f0c0c4b1a4344a7ef225ce \

@@ -184,9 +184,9 @@ pyyaml==6.0.1 \
     --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \
     --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
     --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
-setuptools==69.5.1 \
-    --hash=sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987 \
-    --hash=sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32
+setuptools==70.3.0 \
+    --hash=sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5 \
+    --hash=sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc
 sqlparse==0.5.0 \
     --hash=sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93 \
     --hash=sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663
@@ -1,8 +1,8 @@
 # This file was autogenerated by uv via the following command:
 #    uv pip compile contrib/dev_reqs/requirements.in -o contrib/dev_reqs/requirements.txt
-certifi==2024.2.2 \
-    --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \
-    --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1
+certifi==2024.7.4 \
+    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
     # via requests
 charset-normalizer==3.3.2 \
     --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \
@@ -4,6 +4,8 @@
 Color_Off='\033[0m'
 On_Red='\033[41m'
+PYTHON_FROM=9
+PYTHON_TO=12

 function detect_docker() {
   if [ -n "$(grep docker </proc/1/cgroup)" ]; then

@@ -57,6 +59,19 @@ function detect_python() {
     echo "# No python environment found - using environment variable: ${SETUP_PYTHON}"
   fi

+  # Try to detect a python between 3.9 and 3.12 in reverse order
+  if [ -z "${SETUP_PYTHON}" ]; then
+    echo "# Trying to detect python3.${PYTHON_FROM} to python3.${PYTHON_TO} - using newest version"
+    for i in $(seq $PYTHON_TO -1 $PYTHON_FROM); do
+      echo "# Checking for python3.${i}"
+      if [ -n "$(which python3.${i})" ]; then
+        SETUP_PYTHON="python3.${i}"
+        echo "# Found python3.${i} installed - using for setup ${SETUP_PYTHON}"
+        break
+      fi
+    done
+  fi
+
   # Ensure python can be executed - abort if not
   if [ -z "$(which ${SETUP_PYTHON})" ]; then
     echo "${On_Red}"

@@ -117,22 +132,22 @@ function detect_envs() {
     pip install --require-hashes -r ${APP_HOME}/contrib/dev_reqs/requirements.txt -q

     # Load config
-    local CONF=$(cat ${INVENTREE_CONFIG_FILE} | jc --yaml)
+    export INVENTREE_CONF_DATA=$(cat ${INVENTREE_CONFIG_FILE} | jc --yaml)

     # Parse the config file
-    export INVENTREE_MEDIA_ROOT=$(jq -r '.[].media_root' <<< ${CONF})
-    export INVENTREE_STATIC_ROOT=$(jq -r '.[].static_root' <<< ${CONF})
-    export INVENTREE_BACKUP_DIR=$(jq -r '.[].backup_dir' <<< ${CONF})
-    export INVENTREE_PLUGINS_ENABLED=$(jq -r '.[].plugins_enabled' <<< ${CONF})
-    export INVENTREE_PLUGIN_FILE=$(jq -r '.[].plugin_file' <<< ${CONF})
-    export INVENTREE_SECRET_KEY_FILE=$(jq -r '.[].secret_key_file' <<< ${CONF})
+    export INVENTREE_MEDIA_ROOT=$(jq -r '.[].media_root' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_STATIC_ROOT=$(jq -r '.[].static_root' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_BACKUP_DIR=$(jq -r '.[].backup_dir' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_PLUGINS_ENABLED=$(jq -r '.[].plugins_enabled' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_PLUGIN_FILE=$(jq -r '.[].plugin_file' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_SECRET_KEY_FILE=$(jq -r '.[].secret_key_file' <<< ${INVENTREE_CONF_DATA})

-    export INVENTREE_DB_ENGINE=$(jq -r '.[].database.ENGINE' <<< ${CONF})
-    export INVENTREE_DB_NAME=$(jq -r '.[].database.NAME' <<< ${CONF})
-    export INVENTREE_DB_USER=$(jq -r '.[].database.USER' <<< ${CONF})
-    export INVENTREE_DB_PASSWORD=$(jq -r '.[].database.PASSWORD' <<< ${CONF})
-    export INVENTREE_DB_HOST=$(jq -r '.[].database.HOST' <<< ${CONF})
-    export INVENTREE_DB_PORT=$(jq -r '.[].database.PORT' <<< ${CONF})
+    export INVENTREE_DB_ENGINE=$(jq -r '.[].database.ENGINE' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_DB_NAME=$(jq -r '.[].database.NAME' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_DB_USER=$(jq -r '.[].database.USER' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_DB_PASSWORD=$(jq -r '.[].database.PASSWORD' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_DB_HOST=$(jq -r '.[].database.HOST' <<< ${INVENTREE_CONF_DATA})
+    export INVENTREE_DB_PORT=$(jq -r '.[].database.PORT' <<< ${INVENTREE_CONF_DATA})
  else
    echo "# No config file found: ${INVENTREE_CONFIG_FILE}, using envs or defaults"

@@ -231,7 +246,11 @@ function create_initscripts() {
 }

 function create_admin() {
-  # Create data for admin user
+  # Create data for admin users - stop with setting SETUP_ADMIN_NOCREATION to true
+  if [ "${SETUP_ADMIN_NOCREATION}" == "true" ]; then
+    echo "# Admin creation is disabled - skipping"
+    return
+  fi

  if test -f "${SETUP_ADMIN_PASSWORD_FILE}"; then
    echo "# Admin data already exists - skipping"

@@ -340,3 +359,44 @@ function final_message() {
   echo -e "  Password: ${INVENTREE_ADMIN_PASSWORD}"
   echo -e "####################################################################################"
 }
+
+function update_checks() {
+  echo "# Running upgrade"
+  local old_version=$1
+  local old_version_rev=$(echo ${old_version} | cut -d'-' -f1 | cut -d'.' -f2)
+  echo "# Old version is: ${old_version} - release: ${old_version_rev}"
+
+  local ABORT=false
+  function check_config_value() {
+    local env_key=$1
+    local config_key=$2
+    local name=$3
+
+    local value=$(inventree config:get ${env_key})
+    if [ -z "${value}" ] || [ "$value" == "null" ]; then
+      value=$(jq -r ".[].${config_key}" <<< ${INVENTREE_CONF_DATA})
+    fi
+    if [ -z "${value}" ] || [ "$value" == "null" ]; then
+      echo "# No setting for ${name} found - please set it manually either in ${INVENTREE_CONFIG_FILE} under '${config_key}' or with 'inventree config:set ${env_key}=value'"
+      ABORT=true
+    else
+      echo "# Found setting for ${name} - ${value}"
+    fi
+  }
+
+  # Custom checks if old version is below 0.9.0
+  if [ "${old_version_rev}" -lt "9" ]; then
+    echo "# Old version is below 0.9.0 - You might be missing some configs"
+
+    # Check for BACKUP_DIR and SITE_URL in INVENTREE_CONF_DATA and config
+    check_config_value "INVENTREE_SITE_URL" "site_url" "site URL"
+    check_config_value "INVENTREE_BACKUP_DIR" "backup_dir" "backup dir"
+
+    if [ "${ABORT}" = true ]; then
+      echo "# Aborting - please set the missing values and run the update again"
+      exit 1
+    fi
+    echo "# All checks passed - continuing with the update"
+  fi
+}
@@ -11,7 +11,7 @@ PATH=${APP_HOME}/env/bin:${APP_HOME}/:/sbin:/bin:/usr/sbin:/usr/bin:
 . ${APP_HOME}/contrib/packager.io/functions.sh

 # Envs that should be passed to setup commands
-export SETUP_ENVS=PATH,APP_HOME,INVENTREE_MEDIA_ROOT,INVENTREE_STATIC_ROOT,INVENTREE_BACKUP_DIR,INVENTREE_PLUGINS_ENABLED,INVENTREE_PLUGIN_FILE,INVENTREE_CONFIG_FILE,INVENTREE_SECRET_KEY_FILE,INVENTREE_DB_ENGINE,INVENTREE_DB_NAME,INVENTREE_DB_USER,INVENTREE_DB_PASSWORD,INVENTREE_DB_HOST,INVENTREE_DB_PORT,INVENTREE_ADMIN_USER,INVENTREE_ADMIN_EMAIL,INVENTREE_ADMIN_PASSWORD,SETUP_NGINX_FILE,SETUP_ADMIN_PASSWORD_FILE,SETUP_NO_CALLS,SETUP_DEBUG,SETUP_EXTRA_PIP,SETUP_PYTHON
+export SETUP_ENVS=PATH,APP_HOME,INVENTREE_MEDIA_ROOT,INVENTREE_STATIC_ROOT,INVENTREE_BACKUP_DIR,INVENTREE_PLUGINS_ENABLED,INVENTREE_PLUGIN_FILE,INVENTREE_CONFIG_FILE,INVENTREE_SECRET_KEY_FILE,INVENTREE_DB_ENGINE,INVENTREE_DB_NAME,INVENTREE_DB_USER,INVENTREE_DB_PASSWORD,INVENTREE_DB_HOST,INVENTREE_DB_PORT,INVENTREE_ADMIN_USER,INVENTREE_ADMIN_EMAIL,INVENTREE_ADMIN_PASSWORD,SETUP_NGINX_FILE,SETUP_ADMIN_PASSWORD_FILE,SETUP_NO_CALLS,SETUP_DEBUG,SETUP_EXTRA_PIP,SETUP_PYTHON,SETUP_ADMIN_NOCREATION

 # Get the envs
 detect_local_env

@@ -24,6 +24,7 @@ export SETUP_NGINX_FILE=${SETUP_NGINX_FILE:-/etc/nginx/sites-enabled/inventree.c
 export SETUP_ADMIN_PASSWORD_FILE=${CONF_DIR}/admin_password.txt
 export SETUP_NO_CALLS=${SETUP_NO_CALLS:-false}
 export SETUP_PYTHON=${SETUP_PYTHON:-python3.9}
+export SETUP_ADMIN_NOCREATION=${SETUP_ADMIN_NOCREATION:-false}
 # SETUP_DEBUG can be set to get debug info
 # SETUP_EXTRA_PIP can be set to install extra pip packages
 # SETUP_PYTHON can be set to use a different python version

@@ -35,6 +36,14 @@ detect_initcmd
 detect_ip
 detect_python

+# Check if we are updating and need to alert
+echo "# Checking if update checks are needed"
+if [ -z "$2" ]; then
+  echo "# Normal install - no need for checks"
+else
+  update_checks $2
+fi
+
 # create processes
 create_initscripts
 create_admin
contrib/packager.io/preinstall.sh (new executable file, 15 lines)

#!/bin/bash
#
# packager.io preinstall script
#
PATH=${APP_HOME}/env/bin:${APP_HOME}/:/sbin:/bin:/usr/sbin:/usr/bin:

# Envs that should be passed to setup commands
export SETUP_ENVS=PATH,APP_HOME,INVENTREE_MEDIA_ROOT,INVENTREE_STATIC_ROOT,INVENTREE_BACKUP_DIR,INVENTREE_PLUGINS_ENABLED,INVENTREE_PLUGIN_FILE,INVENTREE_CONFIG_FILE,INVENTREE_SECRET_KEY_FILE,INVENTREE_DB_ENGINE,INVENTREE_DB_NAME,INVENTREE_DB_USER,INVENTREE_DB_PASSWORD,INVENTREE_DB_HOST,INVENTREE_DB_PORT,INVENTREE_ADMIN_USER,INVENTREE_ADMIN_EMAIL,INVENTREE_ADMIN_PASSWORD,SETUP_NGINX_FILE,SETUP_ADMIN_PASSWORD_FILE,SETUP_NO_CALLS,SETUP_DEBUG,SETUP_EXTRA_PIP,SETUP_PYTHON

if test -f "${APP_HOME}/env/bin/pip"; then
  echo "# Clearing precompiled files"
  sudo -u ${APP_USER} --preserve-env=$SETUP_ENVS bash -c "cd ${APP_HOME} && invoke clear-generated"
else
  echo "# No python environment found - skipping"
fi
BIN docs/docs/assets/images/build/allocated_stock_table.png (new file, 68 KiB, binary not shown)
BIN docs/docs/assets/images/part/part_create_revision.png (new file, 68 KiB, binary not shown)
BIN docs/docs/assets/images/part/part_revision_b.png (new file, 16 KiB, binary not shown)
BIN docs/docs/assets/images/part/part_revision_select.png (new file, 33 KiB, binary not shown)
BIN docs/docs/assets/images/part/part_revision_settings.png (new file, 5.4 KiB, binary not shown)
docs/docs/build/build.md (37 changes)

@@ -26,14 +26,6 @@ To navigate to the Build Order display, select *Build* from the main navigation
 {% include "img.html" %}
 {% endwith %}

-#### Tree View
-
-*Tree View* also provides a tabulated view of Build Orders. Orders are displayed in a hierarchical manner, showing any parent / child relationships between different build orders.
-
-{% with id="build_tree", url="build/build_tree.png", description="Build Tree" %}
-{% include "img.html" %}
-{% endwith %}
-
 #### Calendar View

 *Calendar View* shows a calendar display with upcoming build orders, based on the various dates specified for each build.

@@ -121,9 +113,9 @@ The *Build Details* tab provides an overview of the Build Order:
 {% include "img.html" %}
 {% endwith %}

-### Allocate Stock
+### Line Items

-The *Allocate Stock* tab provides an interface to allocate required stock (as specified by the BOM) to the build:
+The *Line Items* tab provides an interface to allocate required stock (as specified by the BOM) to the build:

 {% with id="build_allocate", url="build/build_allocate.png", description="Allocation tab" %}
 {% include "img.html" %}

@@ -131,8 +123,13 @@ The *Line Items* tab provides an interface to allocate required stock (as spec

 The allocation table (as shown above) shows the stock allocation progress for this build. In the example above, there are two BOM lines, which have been partially allocated.

-!!! info "Completed Builds"
-    The *Allocate Stock* tab is not available if the build has been completed!
+### Allocated Stock
+
+The *Allocated Stock* tab displays all stock items which have been *allocated* to this build order. These stock items are reserved for this build, and will be consumed when the build is completed:
+
+{% with id="allocated_stock_table", url="build/allocated_stock_table.png", description="Allocated Stock Table" %}
+{% include "img.html" %}
+{% endwith %}

 ### Consumed Stock

@@ -246,3 +243,19 @@ Build orders may (optionally) have a target complete date specified. If this dat

 - Builds can be filtered by overdue status in the build list
 - Overdue builds will be displayed on the home page
+
+## Build Order Restrictions
+
+There are a number of optional restrictions which can be applied to build orders, which may be enabled or disabled in the system settings:
+
+### Require Active Part
+
+If this option is enabled, build orders can only be created for parts which are marked as [Active](../part/part.md#active-parts).
+
+### Require Locked Part
+
+If this option is enabled, build orders can only be created for parts which are marked as [Locked](../part/part.md#locked-parts).
+
+### Require Valid BOM
+
+If this option is enabled, build orders can only be created for parts which have a valid [Bill of Materials](./bom.md) defined.
@@ -67,7 +67,7 @@ If you need to process your queue with background workers, run the `worker` task
 You can either only run InvenTree or use the integrated debugger for debugging. Goto the `Run and debug` side panel, make sure `InvenTree Server` is selected. Click on the play button on the left.

 !!! tip "Debug with 3rd party"
-    Sometimes you need to debug also some 3rd party packages. Just select `InvenTree Servre - 3rd party`
+    Sometimes you need to debug also some 3rd party packages. Just select `InvenTree Server - 3rd party`

 You can now set breakpoints and vscode will automatically pause execution if that point is hit. You can see all variables available in that context and evaluate some code with the debugger console at the bottom. Use the play or step buttons to continue execution.
@@ -6,6 +6,9 @@ title: Machines

 InvenTree has a builtin machine registry. There are different machine types available where each type can have different drivers. Drivers and even custom machine types can be provided by plugins.

+!!! info "Requires Redis"
+    If the machines feature is used in a production setup with workers, a shared [redis cache](../../start/docker.md#redis-cache) is required to function properly.
+
 ### Registry

 The machine registry is the main component which gets initialized on server start and manages all configured machines.

@@ -21,6 +24,13 @@ The machine registry initialization process can be divided into three stages:
 2. The driver.init_driver function is called for each used driver
 3. The machine.initialize function is called for each machine, which calls the driver.init_machine function for each machine, then the machine.initialized state is set to true

+#### Production setup (with a worker)
+
+If a worker is connected, there exist multiple instances of the machine registry (one in each worker thread and one in the main thread) due to the nature of how python handles state in different processes. Therefore the machine instances and drivers are instantiated multiple times (the `__init__` method is called multiple times), but the init functions and update hooks (e.g. `init_machine`) are only called once, from the main process.
+
+The registry, driver and machine state (e.g. machine status codes, errors, ...) is stored in the cache, which is why a shared redis cache is needed. (The local in-memory cache which is used by default cannot share cached values across multiple processes.)
+
 ### Machine types

 Each machine type can provide a different type of connection functionality between inventree and a physical machine. These machine types are already built into InvenTree.

@@ -86,6 +96,7 @@ The machine type class gets instantiated for each machine on server startup and
 - update
 - restart
 - handle_error
 - clear_errors
 - get_setting
 - set_setting
+- check_setting
@@ -73,7 +73,15 @@ A [Purchase Order](../order/purchase_order.md) allows parts to be ordered from a

 If a part is designated as *Salable* it can be sold to external customers. Setting this flag allows parts to be added to sales orders.

-### Active
+## Locked Parts
+
+Parts can be locked to prevent them from being modified. This is useful for parts which are in production and should not be changed. The following restrictions apply to parts which are locked:
+
+- Locked parts cannot be deleted
+- BOM items cannot be created, edited, or deleted when they are part of a locked assembly
+- Part parameters linked to a locked part cannot be created, edited or deleted
+
+## Active Parts

 By default, all parts are *Active*. Marking a part as inactive means it is not available for many actions, but the part remains in the database. If a part becomes obsolete, it is recommended that it is marked as inactive, rather than deleting it from the database.
docs/docs/part/revision.md (new file, 78 lines)

---
title: Part Revisions
---

## Part Revisions

When creating a complex part (such as an assembly comprised of other parts), it is often necessary to track changes to the part over time. For example, throughout the lifetime of an assembly, it may be necessary to adjust the bill of materials, or update the design of the part.

Rather than overwrite the existing part data, InvenTree allows you to create a new *revision* of the part. This allows you to track changes to the part over time, and maintain a history of the part design.

Crucially, creating a new *revision* ensures that any related data entries which refer to the original part (such as stock items, build orders, purchase orders, etc) are not affected by the change.

### Revisions are Parts

A *revision* of a part is itself a part. This means that each revision of a part has its own part number, stock items, parameters, bill of materials, etc. The only thing that differentiates a *revision* from any other part is that the *revision* is linked to the original part.

### Revision Fields

Each part has two fields which are used to track the revision of the part:

* **Revision**: The revision number of the part. This is a user-defined field, and can be any string value.
* **Revision Of**: A reference to the part of which *this* part is a revision. This field is used to keep track of the available revisions for any particular part.

### Revision Restrictions

When creating a new revision of a part, there are some restrictions which must be adhered to:

* **Circular References**: A part cannot be a revision of itself. This would create a circular reference which is not allowed.
* **Unique Revisions**: A part cannot have two revisions with the same revision number. Each revision (of a given part) must have a unique revision code.
* **Revisions of Revisions**: A single part can have multiple revisions, but a revision cannot have its own revision. This restriction is in place to prevent overly complex part relationships.
* **Template Revisions**: A part which is a [template part](./template.md) cannot have revisions. This is because the template part is used to create variants, and allowing revisions of templates would create disallowed relationship states in the database. However, variant parts are allowed to have revisions.
* **Template References**: A part which is a revision of a variant part must point to the same template as the original part. This is to ensure that the revision is correctly linked to the original part.

## Revision Settings

The following options are available to control the behavior of part revisions. Note that these options can be changed in the InvenTree settings:

{% with id="part_revision_settings", url="part/part_revision_settings.png", description="Part revision settings" %}
{% include 'img.html' %}
{% endwith %}

* **Enable Revisions**: If this setting is enabled, parts can have revisions. If this setting is disabled, parts cannot have revisions.
* **Assembly Revisions Only**: If this setting is enabled, only assembly parts can have revisions. This is useful if you only want to track revisions of assemblies, and not individual parts.

## Create a Revision

To create a new revision for a given part, navigate to the part detail page, and click on the "Revisions" tab.

Select the "Duplicate Part" action, to create a new copy of the selected part. This will open the "Duplicate Part" form:

{% with id="part_create_revision", url="part/part_create_revision.png", description="Create part revision" %}
{% include 'img.html' %}
{% endwith %}

In this form, make the following updates:

1. Set the *Revision Of* field to the original part (the one that you are duplicating)
2. Set the *Revision* field to a unique revision number for the new part revision

Once these changes (and any other required changes) are made, press *Submit* to create the new part.

Once the form is submitted (without any errors), you will be redirected to the new part revision. Here you can see that it is linked to the original part:

{% with id="part_revision_b", url="part/part_revision_b.png", description="Revision B" %}
{% include 'img.html' %}
{% endwith %}

## Revision Navigation

When multiple revisions exist for a particular part, you can navigate between revisions using the *Select Part Revision* drop-down which renders at the top of the part page:

{% with id="part_revision_select", url="part/part_revision_select.png", description="Select part revision" %}
{% include 'img.html' %}
{% endwith %}

Note that this revision selector is only visible when multiple revisions exist for the part.
@@ -28,7 +28,6 @@ Details provides information about the particular part. Parts details can be dis
 {% with id="part_overview", url="part/part_overview.png", description="Part details" %}
 {% include 'img.html' %}
 {% endwith %}
-<p></p>

 A Part is defined in the system by the following parameters:

@@ -38,7 +37,7 @@ A Part is defined in the system by the following parameters:
 **Description** - Longer form text field describing the Part

-**Revision** - An optional revision code denoting the particular version for the part. Used when there are multiple revisions of the same master part object.
+**Revision** - An optional revision code denoting the particular version for the part. Used when there are multiple revisions of the same master part object. Read [more about part revisions here](./revision.md).

 **Keywords** - Optional few words to describe the part and make the part search more efficient.

@@ -62,7 +61,7 @@ Parts can have multiple defined parameters.

 If a part is a *Template Part* then the *Variants* tab will be visible.

-[Read about Part templates](./template.md)
+[Read about Part templates and variants](./template.md)

 ### Stock
@@ -4,13 +4,13 @@ title: InvenTree Single Sign On

 ## Single Sign On

-InvenTree provides the possibility to use 3rd party services to authenticate users. This functionality makes use of [django-allauth](https://django-allauth.readthedocs.io/en/latest/) and supports a wide array of OpenID and OAuth [providers](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html).
+InvenTree provides the possibility to use 3rd party services to authenticate users. This functionality makes use of [django-allauth](https://docs.allauth.org/en/latest/) and supports a wide array of OpenID and OAuth [providers](https://docs.allauth.org/en/latest/socialaccount/providers/index.html).

 !!! tip "Provider Documentation"
-    There are a lot of technical considerations when configuring a particular SSO provider. A good starting point is the [django-allauth documentation](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html)
+    There are a lot of technical considerations when configuring a particular SSO provider. A good starting point is the [django-allauth documentation](https://docs.allauth.org/en/latest/socialaccount/providers/index.html)

 !!! warning "Advanced Users"
-    The SSO functionality provided by django-allauth is powerful, but can prove challenging to configure. Please ensure that you understand the implications of enabling SSO for your InvenTree instance. Specific technical details of each available SSO provider are beyond the scope of this documentation - please refer to the [django-allauth documentation](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) for more information.
+    The SSO functionality provided by django-allauth is powerful, but can prove challenging to configure. Please ensure that you understand the implications of enabling SSO for your InvenTree instance. Specific technical details of each available SSO provider are beyond the scope of this documentation - please refer to the [django-allauth documentation](https://docs.allauth.org/en/latest/socialaccount/providers/index.html) for more information.

 ## SSO Configuration

@@ -31,8 +31,8 @@ There are two variables in the configuration file which define the operation of

 | Environment Variable | Configuration File | Description | More Info |
 | --- | --- | --- | --- |
-| INVENTREE_SOCIAL_BACKENDS | `social_backends` | A *list* of provider backends enabled for the InvenTree instance | [django-allauth docs](https://django-allauth.readthedocs.io/en/latest/installation/quickstart.html) |
-| INVENTREE_SOCIAL_PROVIDERS | `social_providers` | A *dict* of settings specific to the installed providers | [provider documentation](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) |
+| INVENTREE_SOCIAL_BACKENDS | `social_backends` | A *list* of provider backends enabled for the InvenTree instance | [django-allauth docs](https://docs.allauth.org/en/latest/installation/quickstart.html) |
+| INVENTREE_SOCIAL_PROVIDERS | `social_providers` | A *dict* of settings specific to the installed providers | [provider documentation](https://docs.allauth.org/en/latest/socialaccount/providers/index.html) |

 In the example below, SSO provider modules are activated for *google*, *github* and *microsoft*. Specific configuration options are specified for the *microsoft* provider module:

@@ -44,7 +44,7 @@ In the example below, SSO provider modules are activated for *google*, *github*
 Note that the provider modules specified in `social_backends` must be prefixed with `allauth.socialaccounts.providers`

 !!! warning "Provider Documentation"
-    We do not provide any specific documentation for each provider module. Please refer to the [django-allauth documentation](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) for more information.
+    We do not provide any specific documentation for each provider module. Please refer to the [django-allauth documentation](https://docs.allauth.org/en/latest/socialaccount/providers/index.html) for more information.

 !!! tip "Restart Server"
    As the [configuration file](../start/config.md) is only read when the server is launched, ensure you restart the server after editing the file.

@@ -57,7 +57,7 @@ The next step is to create an external authentication app with your provider of
 The provider application will be created as part of your SSO provider setup. This is *not* the same as the *SocialApp* entry in the InvenTree admin interface.

 !!! info "Read the Documentation"
-    The [django-allauth documentation](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) is a good starting point here. There are also a number of good tutorials online (at least for the major supported SSO providers).
+    The [django-allauth documentation](https://docs.allauth.org/en/latest/socialaccount/providers/index.html) is a good starting point here. There are also a number of good tutorials online (at least for the major supported SSO providers).

 In general, the external app will generate a *key* and *secret* pair - although different terminology may be used, depending on the provider.

@@ -132,6 +132,31 @@ In the [settings screen](./global.md), navigate to the *Login Settings* panel. H

 Note that [email settings](./email.md) must be correctly configured before SSO will be activated. Ensure that your email setup is correctly configured and operational.

+## SSO Group Sync Configuration
+
+InvenTree has the ability to synchronize groups assigned to each user directly from the IdP. To enable this feature, navigate to the *Login Settings* panel in the [settings screen](./global.md) first. Here, the following options are available:
+
+| Setting | Description |
+| --- | --- |
+| Enable SSO group sync | Enable synchronizing InvenTree groups with groups provided by the IdP |
+| SSO group key | The name of the claim containing all groups, e.g. `groups` or `roles` |
+| SSO group map | A mapping from SSO groups to InvenTree groups as JSON, e.g. `{"/inventree/admins": "admin"}`. If the mapped group does not exist once a user signs up, a new group without assigned permissions will be created. |
+| Remove groups outside of SSO | Whether groups should be removed from the user if they are not present in the IdP data |
+
+!!! warning "Remove groups outside of SSO"
+    Disabling this feature might cause security issues, as groups that are removed in the IdP will stay assigned in InvenTree
+
+### Keycloak OIDC example configuration
+
+!!! tip "Configuration for different IdPs"
+    The main challenge in enabling the SSO group sync feature is for the SSO admin to configure the IdP such that the groups are correctly represented in the Django allauth `extra_data` attribute. The SSO group sync feature has been developed and tested using integrated Keycloak users/groups and OIDC. If you are utilizing this feature using another IdP, kindly consider documenting your configuration steps as well.
+
+Keycloak groups are not sent to the OIDC client by default. To enable such functionality, create a new client scope named `groups` in the Keycloak admin console. For this scope, add a new mapper ('By Configuration') and select 'Group Membership'. Give it a descriptive name and set the token claim name to `groups`.
+
+For each OIDC client that relies on those groups, explicitly add the `groups` scope to the client scopes. The groups will now be sent to the client upon request.
+
+**Note:** A group named `foo` will be displayed as `/foo`. For this reason, the example above recommends using group names like `appname/rolename`, which will be sent to the client as `/appname/rolename`.
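To make the mapping concrete, here is a hedged sketch (plain Python with illustrative names only, not InvenTree's actual implementation) of how the documented settings combine to translate IdP groups into InvenTree group names:

```python
def map_sso_groups(extra_data: dict, group_key: str, group_map: dict) -> set:
    """Translate IdP groups (as found in allauth's extra_data) via the configured map."""
    idp_groups = extra_data.get(group_key, [])  # e.g. ["/inventree/admins"]
    return {group_map[g] for g in idp_groups if g in group_map}

# Using the example values from the settings table above:
extra_data = {"groups": ["/inventree/admins", "/inventree/users"]}
print(map_sso_groups(extra_data, "groups", {"/inventree/admins": "admin"}))
# -> {'admin'}
```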

 ## Security Considerations

 You should use SSL for your website if you want to use this feature. Also set your callback-endpoints to `https://` addresses to reduce the risk of leaking user's tokens.
@@ -37,6 +37,10 @@ Change how logins, password-forgot, signups are handled.
 | Enable registration | Boolean | Enable self-registration for users on the login-pages | False |
 | Enable SSO | Boolean | Enable SSO on the login-pages | False |
 | Enable SSO registration | Boolean | Enable self-registration for users via SSO on the login-pages | False |
+| Enable SSO group sync | Boolean | Enable synchronizing InvenTree groups directly from the IdP | False |
+| SSO group key | String | The name of the groups claim attribute provided by the IdP | |
+| SSO group map | String (JSON) | A mapping from SSO groups to local InvenTree groups | {} |
+| Remove groups outside of SSO | Boolean | Whether groups assigned to the user should be removed if they are not backed by the IdP. Disabling this setting might cause security issues | True |
 | Enable password forgot | Boolean | Enable password forgot function on the login-pages.<br><br>This will let users reset their passwords on their own. For this feature to work you need to configure E-mail | True |
 | E-Mail required | Boolean | Require user to supply e-mail on signup.<br><br>Without a way (e-mail) to contact the user notifications and security features might not work! | False |
 | Enforce MFA | Boolean | Users must use multifactor security.<br><br>This forces each user to setup MFA and use it on each authentication | False |
@@ -6,6 +6,10 @@ title: Stock

 A stock location represents a physical real-world location where *Stock Items* are stored. Locations are arranged in a cascading manner and each location may contain multiple sub-locations, or stock, or both.

+## Stock Location Type
+
+A stock location type represents a specific type of location (e.g. one specific size of drawer, shelf, or box) which can be assigned to multiple stock locations. Its primary purpose is to specify an icon and keep that icon in sync for all locations that use this location type, but it also serves as a data field to quickly see what type of location this is. It is planned to add e.g. drawer dimension information to the location type, to support a "find a matching, empty stock location" tool.
+
 ## Stock Item

 A *Stock Item* is an actual instance of a [*Part*](../part/part.md) item. It represents a physical quantity of the *Part* in a specific location.
@@ -106,6 +106,7 @@ nav:
         - Part Views: part/views.md
         - Tracking: part/trackable.md
         - Parameters: part/parameter.md
+        - Revisions: part/revision.md
         - Templates: part/template.md
         - Tests: part/test.md
         - Pricing: part/pricing.md
@@ -14,9 +14,9 @@ bracex==2.4 \
     --hash=sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb \
     --hash=sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418
     # via wcmatch
-certifi==2024.2.2 \
-    --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \
-    --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1
+certifi==2024.7.4 \
+    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
     # via
     #   httpcore
     #   httpx

@@ -173,9 +173,9 @@ idna==3.7 \
     #   anyio
     #   httpx
     #   requests
-importlib-metadata==7.1.0 \
-    --hash=sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570 \
-    --hash=sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2
+importlib-metadata==8.0.0 \
+    --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
+    --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
     # via
     #   markdown
     #   mkdocs

@@ -539,15 +539,15 @@ termcolor==2.4.0 \
     --hash=sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 \
     --hash=sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a
     # via mkdocs-macros-plugin
-typing-extensions==4.11.0 \
-    --hash=sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0 \
-    --hash=sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a
+typing-extensions==4.12.2 \
+    --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
+    --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
     # via
     #   anyio
     #   mkdocstrings
-urllib3==2.2.1 \
-    --hash=sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d \
-    --hash=sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19
+urllib3==2.2.2 \
+    --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \
+    --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168
     # via requests
 watchdog==4.0.0 \
     --hash=sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257 \

@@ -584,7 +584,7 @@ wcmatch==8.5.2 \
     --hash=sha256:17d3ad3758f9d0b5b4dedc770b65420d4dac62e680229c287bf24c9db856a478 \
     --hash=sha256:a70222b86dea82fb382dd87b73278c10756c138bd6f8f714e2183128887b9eb2
     # via mkdocs-include-markdown-plugin
-zipp==3.18.2 \
-    --hash=sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059 \
-    --hash=sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e
+zipp==3.19.2 \
+    --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
+    --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
     # via importlib-metadata
@@ -311,8 +311,26 @@ class BulkDeleteMixin:
     - Speed (single API call and DB query)
     """

+    def validate_delete(self, queryset, request) -> None:
+        """Perform validation right before deletion.
+
+        Arguments:
+            queryset: The queryset to be deleted
+            request: The request object
+
+        Returns:
+            None
+
+        Raises:
+            ValidationError: If the deletion should not proceed
+        """
+        pass
+
     def filter_delete_queryset(self, queryset, request):
-        """Provide custom filtering for the queryset *before* it is deleted."""
+        """Provide custom filtering for the queryset *before* it is deleted.
+
+        The default implementation does nothing, just returns the queryset.
+        """
         return queryset

     def delete(self, request, *args, **kwargs):

@@ -371,6 +389,9 @@ class BulkDeleteMixin:
         if filters:
             queryset = queryset.filter(**filters)

+        # Run a final validation step (should raise an error if the deletion should not proceed)
+        self.validate_delete(queryset, request)
+
         n_deleted = queryset.count()
         queryset.delete()
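Below is a hedged sketch of how a view might use the new hook; the view name and the `locked` field are illustrative assumptions, not code from this commit, and the import paths of the base classes are not reproduced here:

```python
from rest_framework.exceptions import ValidationError

# BulkDeleteMixin and ListCreateAPI are the classes shown in the diff above
class PartBulkDelete(BulkDeleteMixin, ListCreateAPI):
    """Hypothetical endpoint demonstrating the validate_delete() hook."""

    def validate_delete(self, queryset, request) -> None:
        # Abort the entire bulk delete if any selected row is locked
        if queryset.filter(locked=True).exists():
            raise ValidationError('Cannot bulk-delete locked items')
```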
@@ -383,42 +404,6 @@ class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
     ...


-class APIDownloadMixin:
-    """Mixin for enabling a LIST endpoint to be downloaded a file.
-
-    To download the data, add the ?export=<fmt> to the query string.
-
-    The implementing class must provided a download_queryset method,
-    e.g.
-
-    def download_queryset(self, queryset, export_format):
-        dataset = StockItemResource().export(queryset=queryset)
-
-        filedata = dataset.export(export_format)
-
-        filename = 'InvenTree_Stocktake_{date}.{fmt}'.format(
-            date=datetime.now().strftime("%d-%b-%Y"),
-            fmt=export_format
-        )
-
-        return DownloadFile(filedata, filename)
-    """
-
-    def get(self, request, *args, **kwargs):
-        """Generic handler for a download request."""
-        export_format = request.query_params.get('export', None)
-
-        if export_format and export_format in ['csv', 'tsv', 'xls', 'xlsx']:
-            queryset = self.filter_queryset(self.get_queryset())
-            return self.download_queryset(queryset, export_format)
-        # Default to the parent class implementation
-        return super().get(request, *args, **kwargs)
-
-    def download_queryset(self, queryset, export_format):
-        """This function must be implemented to provide a downloadFile request."""
-        raise NotImplementedError('download_queryset method not implemented!')
-
-
 class APISearchViewSerializer(serializers.Serializer):
     """Serializer for the APISearchView."""
@@ -1,11 +1,61 @@
 """InvenTree API version information."""

 # InvenTree API version
-INVENTREE_API_VERSION = 210
+INVENTREE_API_VERSION = 225

 """Increment this API version number whenever there is a significant change to the API that any clients need to know about."""


 INVENTREE_API_TEXT = """
+v225 - 2024-07-17 : https://github.com/inventree/InvenTree/pull/7671
+    - Adds "filters" field to DataImportSession API
+
+v224 - 2024-07-14 : https://github.com/inventree/InvenTree/pull/7667
+    - Add notes field to ManufacturerPart and SupplierPart API endpoints
+
+v223 - 2024-07-14 : https://github.com/inventree/InvenTree/pull/7649
+    - Allow adjustment of "packaging" field when receiving items against a purchase order
+
+v222 - 2024-07-14 : https://github.com/inventree/InvenTree/pull/7635
+    - Adjust the BomItem API endpoint to improve data import process
+
+v221 - 2024-07-13 : https://github.com/inventree/InvenTree/pull/7636
+    - Adds missing fields from StockItemBriefSerializer
+    - Adds missing fields from PartBriefSerializer
+    - Adds extra exportable fields to BuildItemSerializer
+
+v220 - 2024-07-11 : https://github.com/inventree/InvenTree/pull/7585
+    - Adds "revision_of" field to Part serializer
+    - Adds new API filters for "revision" status
+
+v219 - 2024-07-11 : https://github.com/inventree/InvenTree/pull/7611
+    - Adds new fields to the BuildItem API endpoints
+    - Adds new ordering / filtering options to the BuildItem API endpoints
+
+v218 - 2024-07-11 : https://github.com/inventree/InvenTree/pull/7619
+    - Adds "can_build" field to the BomItem API
+
+v217 - 2024-07-09 : https://github.com/inventree/InvenTree/pull/7599
+    - Fixes bug in "project_code" field for order API endpoints
+
+v216 - 2024-07-08 : https://github.com/inventree/InvenTree/pull/7595
+    - Moves API endpoint for contenttype lookup by model name
+
+v215 - 2024-07-09 : https://github.com/inventree/InvenTree/pull/7591
+    - Adds additional fields to the BuildLine serializer
+
+v214 - 2024-07-08 : https://github.com/inventree/InvenTree/pull/7587
+    - Adds "default_location_detail" field to the Part API
+
+v213 - 2024-07-06 : https://github.com/inventree/InvenTree/pull/7527
+    - Adds 'locked' field to Part API
+
+v212 - 2024-07-06 : https://github.com/inventree/InvenTree/pull/7562
+    - Makes API generation more robust (no functional changes)
+
+v211 - 2024-06-26 : https://github.com/inventree/InvenTree/pull/6911
+    - Adds API endpoints for managing data import and export
+
 v210 - 2024-06-26 : https://github.com/inventree/InvenTree/pull/7518
     - Adds translateable text to User API fields
@@ -11,6 +11,8 @@ from django.core.exceptions import AppRegistryNotReady
 from django.db import transaction
 from django.db.utils import IntegrityError, OperationalError

+from allauth.socialaccount.signals import social_account_added, social_account_updated
+
 import InvenTree.conversion
 import InvenTree.ready
 import InvenTree.tasks

@@ -70,6 +72,12 @@ class InvenTreeConfig(AppConfig):
         self.add_user_on_startup()
         self.add_user_from_file()

+        # register event receiver and connect signal for SSO group sync. The connected signal is
+        # used for account updates whereas the receiver is used for the initial account creation.
+        from InvenTree import sso
+
+        social_account_updated.connect(sso.ensure_sso_groups)
+
     def remove_obsolete_tasks(self):
         """Delete any obsolete scheduled tasks in the database."""
         obsolete = [
@@ -190,7 +190,7 @@ class CustomSignupForm(SignupForm):

         # check for two password fields
         if not get_global_setting('LOGIN_SIGNUP_PWD_TWICE'):
-            self.fields.pop('password2')
+            self.fields.pop('password2', None)

         # reorder fields
         set_form_field_order(

@@ -269,7 +269,9 @@ class RegistratonMixin:

         # Check if a default group is set in settings
         start_group = get_global_setting('SIGNUP_GROUP')
-        if start_group:
+        if (
+            start_group and user.groups.count() == 0
+        ):  # check that no group has been added through SSO group sync
             try:
                 group = Group.objects.get(id=start_group)
                 user.groups.add(group)
@ -429,8 +429,8 @@ def MakeBarcode(cls_name, object_pk: int, object_data=None, **kwargs):


def GetExportFormats():
    """Return a list of allowable file formats for exporting data."""
    return ['csv', 'tsv', 'xls', 'xlsx', 'json', 'yaml']
    """Return a list of allowable file formats for importing or exporting tabular data."""
    return ['csv', 'xlsx', 'tsv', 'json']


def DownloadFile(
@ -2,8 +2,10 @@

import inspect
from pathlib import Path
from typing import Any, Callable

from django.conf import settings
from django.core.cache import cache

from plugin import registry as plg_registry

@ -104,3 +106,37 @@ class ClassProviderMixin:
        except ValueError:
            # Path(...).relative_to throws a ValueError if it's not relative to the InvenTree source base dir
            return False


def get_shared_class_instance_state_mixin(get_state_key: Callable[[type], str]):
    """Get a mixin class that provides shared state for classes across the main application and worker.

    Arguments:
        get_state_key: A function that returns the key for the shared state when given a class instance.
    """

    class SharedClassStateMixinClass:
        """Mixin to provide shared state for classes across the main application and worker."""

        def set_shared_state(self, key: str, value: Any):
            """Set a shared state value for this machine.

            Arguments:
                key: The key for the shared state
                value: The value to set
            """
            cache.set(self._get_key(key), value, timeout=None)

        def get_shared_state(self, key: str, default=None):
            """Get a shared state value for this machine.

            Arguments:
                key: The key for the shared state
            """
            return cache.get(self._get_key(key)) or default

        def _get_key(self, key: str):
            """Get the key for this class instance."""
            return f'{get_state_key(self)}:{key}'

    return SharedClassStateMixinClass
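For orientation, the factory above returns a mixin whose cache keys are derived per instance via get_state_key. A minimal usage sketch follows; the import path and the Machine class are illustrative assumptions, not part of this diff, and a configured Django cache backend is assumed:

    # Sketch only: the import path and Machine class are illustrative.
    from InvenTree.helpers_mixin import get_shared_class_instance_state_mixin

    SharedState = get_shared_class_instance_state_mixin(lambda obj: f'machine:{obj.pk}')

    class Machine(SharedState):
        """Instances with the same pk share state across web and worker processes."""

        def __init__(self, pk: int):
            self.pk = pk

    m = Machine(pk=1)
    m.set_shared_state('status', 'RUNNING')               # stored in the shared cache, no timeout
    assert m.get_shared_state('status') == 'RUNNING'
    assert m.get_shared_state('missing', default=0) == 0  # falls back to the default

Because the values live in the cache rather than in process memory, the web server and the background worker observe the same state.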
@ -252,7 +252,7 @@ def render_currency(


def getModelsWithMixin(mixin_class) -> list:
    """Return a list of models that inherit from the given mixin class.
    """Return a list of database models that inherit from the given mixin class.

    Args:
        mixin_class: The mixin class to search for
@ -15,6 +15,7 @@ Additionally, update the following files with the new locale code:
from django.utils.translation import gettext_lazy as _

LOCALES = [
    ('ar', _('Arabic')),
    ('bg', _('Bulgarian')),
    ('cs', _('Czech')),
    ('da', _('Danish')),
@ -23,6 +24,7 @@ LOCALES = [
    ('en', _('English')),
    ('es', _('Spanish')),
    ('es-mx', _('Spanish (Mexican)')),
    ('et', _('Estonian')),
    ('fa', _('Farsi / Persian')),
    ('fi', _('Finnish')),
    ('fr', _('French')),
@ -137,10 +137,10 @@ class InvenTreeMetadata(SimpleMetadata):
        - field_value: The value of the field (if available)
        - model_value: The equivalent value of the model (if available)
        """
        if model_value and not field_value:
        if field_value is None and model_value is not None:
            return model_value

        if field_value and not model_value:
        if model_value is None and field_value is not None:
            return field_value

        # Callable values will be evaluated later
@ -160,6 +160,8 @@ class InvenTreeMetadata(SimpleMetadata):
        """Override get_serializer_info so that we can add 'default' values to any fields whose Meta.model specifies a default value."""
        self.serializer = serializer

        request = getattr(self, 'request', None)

        serializer_info = super().get_serializer_info(serializer)

        # Look for any dynamic fields which were not available when the serializer was instantiated
@ -169,12 +171,19 @@ class InvenTreeMetadata(SimpleMetadata):
                # Already know about this one
                continue

            if hasattr(serializer, field_name):
                field = getattr(serializer, field_name)
            if field := getattr(serializer, field_name, None):
                serializer_info[field_name] = self.get_field_info(field)

        model_class = None

        # Extract read_only_fields and write_only_fields from the Meta class (if available)
        if meta := getattr(serializer, 'Meta', None):
            read_only_fields = getattr(meta, 'read_only_fields', [])
            write_only_fields = getattr(meta, 'write_only_fields', [])
        else:
            read_only_fields = []
            write_only_fields = []

        # Attributes to copy extra attributes from the model to the field (if they don't exist)
        # Note that the attributes may be named differently on the underlying model!
        extra_attributes = {
@ -188,16 +197,20 @@ class InvenTreeMetadata(SimpleMetadata):

            model_fields = model_meta.get_field_info(model_class)

            model_default_func = getattr(model_class, 'api_defaults', None)

            if model_default_func:
                model_default_values = model_class.api_defaults(self.request)
            if model_default_func := getattr(model_class, 'api_defaults', None):
                model_default_values = model_default_func(request=request) or {}
            else:
                model_default_values = {}

            # Iterate through simple fields
            for name, field in model_fields.fields.items():
                if name in serializer_info.keys():
                    if name in read_only_fields:
                        serializer_info[name]['read_only'] = True

                    if name in write_only_fields:
                        serializer_info[name]['write_only'] = True

                    if field.has_default():
                        default = field.default

@ -231,6 +244,12 @@ class InvenTreeMetadata(SimpleMetadata):
                    # Ignore reverse relations
                    continue

                if name in read_only_fields:
                    serializer_info[name]['read_only'] = True

                if name in write_only_fields:
                    serializer_info[name]['write_only'] = True

                # Extract and provide the "limit_choices_to" filters
                # This is used to automatically filter AJAX requests
                serializer_info[name]['filters'] = (
@ -261,7 +280,8 @@ class InvenTreeMetadata(SimpleMetadata):

        if instance is None and model_class is not None:
            # Attempt to find the instance based on kwargs lookup
            kwargs = getattr(self.view, 'kwargs', None)
            view = getattr(self, 'view', None)
            kwargs = getattr(view, 'kwargs', None) if view else None

            if kwargs:
                pk = None
@ -318,8 +338,10 @@ class InvenTreeMetadata(SimpleMetadata):

        # Force non-nullable fields to read as "required"
        # (even if there is a default value!)
        if not field.allow_null and not (
            hasattr(field, 'allow_blank') and field.allow_blank
        if (
            'required' not in field_info
            and not field.allow_null
            and not (hasattr(field, 'allow_blank') and field.allow_blank)
        ):
            field_info['required'] = True

@ -346,8 +368,11 @@ class InvenTreeMetadata(SimpleMetadata):
                field_info['api_url'] = '/api/user/'
            elif field_info['model'] == 'contenttype':
                field_info['api_url'] = '/api/contenttype/'
            else:
            elif hasattr(model, 'get_api_url'):
                field_info['api_url'] = model.get_api_url()
            else:
                logger.warning("'get_api_url' method not defined for %s", model)
                field_info['api_url'] = getattr(model, 'api_url', None)

        # Handle custom 'primary key' field
        field_info['pk_field'] = getattr(field, 'pk_field', 'pk') or 'pk'
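The api_defaults hook referenced above is a classmethod on the model; the metadata layer now calls it with request passed as a keyword and tolerates a None return. A minimal sketch of a participating model (the model and its default values are invented for illustration):

    # Sketch only: 'ExampleOrder' and its default values are invented.
    from django.db import models

    class ExampleOrder(models.Model):
        reference = models.CharField(max_length=64)

        @classmethod
        def api_defaults(cls, request=None):
            """Provide default field values for API OPTIONS metadata.

            'request' may be None (the new call site guards for this),
            so any request-dependent logic must handle that case.
            """
            return {'reference': 'EX-0001'}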
@ -216,12 +216,15 @@ class MetadataMixin(models.Model):
        self.save()


class DataImportMixin(object):
class DataImportMixin:
    """Model mixin class which provides support for 'data import' functionality.

    Models which implement this mixin should provide information on the fields available for import
    """

    # TODO: This mixin should be removed after https://github.com/inventree/InvenTree/pull/6911 is implemented
    # TODO: This approach to data import functionality is *outdated*

    # Define a map of fields available for import
    IMPORT_FIELDS = {}

@ -856,7 +856,7 @@ class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):

    remote_image = serializers.URLField(
        required=False,
        allow_blank=False,
        allow_blank=True,
        write_only=True,
        label=_('Remote Image'),
        help_text=_('URL of remote image file'),
@ -198,6 +198,7 @@ INSTALLED_APPS = [
    'stock.apps.StockConfig',
    'users.apps.UsersConfig',
    'machine.apps.MachineConfig',
    'importer.apps.ImporterConfig',
    'web',
    'generic',
    'InvenTree.apps.InvenTreeConfig',  # InvenTree app runs last
@ -1,7 +1,14 @@
"""Helper functions for Single Sign On functionality."""

import json
import logging

from django.contrib.auth.models import Group
from django.db.models.signals import post_save
from django.dispatch import receiver

from allauth.socialaccount.models import SocialAccount, SocialLogin

from common.settings import get_global_setting
from InvenTree.helpers import str2bool

@ -75,3 +82,55 @@ def registration_enabled() -> bool:
def auto_registration_enabled() -> bool:
    """Return True if SSO auto-registration is enabled."""
    return str2bool(get_global_setting('LOGIN_SIGNUP_SSO_AUTO'))


def ensure_sso_groups(sender, sociallogin: SocialLogin, **kwargs):
    """Sync groups from IdP each time a SSO user logs on.

    This event listener is registered in the apps ready method.
    """
    if not get_global_setting('LOGIN_ENABLE_SSO_GROUP_SYNC'):
        return

    group_key = get_global_setting('SSO_GROUP_KEY')
    group_map = json.loads(get_global_setting('SSO_GROUP_MAP'))
    # map SSO groups to InvenTree groups
    group_names = []
    for sso_group in sociallogin.account.extra_data.get(group_key, []):
        if mapped_name := group_map.get(sso_group):
            group_names.append(mapped_name)

    # ensure user has groups
    user = sociallogin.account.user
    for group_name in group_names:
        try:
            user.groups.get(name=group_name)
        except Group.DoesNotExist:
            # user not in group yet
            try:
                group = Group.objects.get(name=group_name)
            except Group.DoesNotExist:
                logger.info(f'Creating group {group_name} as it did not exist')
                group = Group(name=group_name)
                group.save()
            logger.info(f'Adding group {group_name} to user {user}')
            user.groups.add(group)

    # remove groups not listed by SSO if not disabled
    if get_global_setting('SSO_REMOVE_GROUPS'):
        for group in user.groups.all():
            if not group.name in group_names:
                logger.info(f'Removing group {group.name} from {user}')
                user.groups.remove(group)


@receiver(post_save, sender=SocialAccount)
def on_social_account_created(sender, instance: SocialAccount, created: bool, **kwargs):
    """Sync SSO groups when new SocialAccount is added.

    Since the allauth `social_account_added` signal is not sent for some reason, this
    signal is simulated using post_save signals. The issue has been reported as
    https://github.com/pennersr/django-allauth/issues/3834
    """
    if created:
        ensure_sso_groups(None, SocialLogin(account=instance))
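To make the group-sync flow concrete: with settings like the sketch below, an IdP login whose extra data contains groups: ['idp_admins'] leaves the user in the InvenTree group 'admins'. All values here are examples; the setting keys are the ones read by ensure_sso_groups above, and set_global_setting is the helper used by the tests later in this commit:

    # Example values only; the keys match those read by ensure_sso_groups().
    from common.settings import set_global_setting

    set_global_setting('LOGIN_ENABLE_SSO_GROUP_SYNC', True)
    set_global_setting('SSO_GROUP_KEY', 'groups')  # claim holding group names in extra_data
    set_global_setting('SSO_GROUP_MAP', '{"idp_admins": "admins", "idp_users": "readers"}')
    # With SSO_REMOVE_GROUPS enabled, any group not named by the IdP is
    # removed from the user on the next login.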
@ -60,10 +60,6 @@ function exportFormatOptions() {
        value: 'tsv',
        display_name: 'TSV',
    },
    {
        value: 'xls',
        display_name: 'XLS',
    },
    {
        value: 'xlsx',
        display_name: 'XLSX',
@ -256,8 +256,8 @@ def offload_task(
            _func(*args, **kwargs)
        except Exception as exc:
            log_error('InvenTree.offload_task')
            raise_warning(f"WARNING: '{taskname}' not started due to {str(exc)}")
            return False
            raise_warning(f"WARNING: '{taskname}' failed due to {str(exc)}")
            raise exc

    # Finally, task either completed successfully or was offloaded
    return True
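Behavioural note on the change above: a failing synchronous task now raises instead of returning False, so call sites that checked the boolean result need a try/except. A hedged sketch, where the task path is a placeholder:

    # Sketch only: 'my_app.tasks.cleanup' is a placeholder task path.
    from InvenTree.tasks import offload_task

    try:
        offload_task('my_app.tasks.cleanup', force_sync=True)
    except Exception:
        # Previously offload_task returned False here; the exception now
        # surfaces and the caller decides how to recover.
        pass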
@ -438,9 +438,9 @@ def progress_bar(val, max_val, *args, **kwargs):


@register.simple_tag()
def get_color_theme_css(username):
def get_color_theme_css(user):
    """Return the custom theme .css file for the selected user."""
    user_theme_name = get_user_color_theme(username)
    user_theme_name = get_user_color_theme(user)
    # Build path to CSS sheet
    inventree_css_sheet = os.path.join('css', 'color-themes', user_theme_name + '.css')

@ -451,12 +451,18 @@ def get_color_theme_css(username):


@register.simple_tag()
def get_user_color_theme(username):
def get_user_color_theme(user):
    """Get current user color theme."""
    from common.models import ColorTheme

    try:
        user_theme = ColorTheme.objects.filter(user=username).get()
        if not user.is_authenticated:
            return 'default'
    except Exception:
        return 'default'

    try:
        user_theme = ColorTheme.objects.filter(user_obj=user).get()
        user_theme_name = user_theme.name
        if not user_theme_name or not ColorTheme.is_valid_choice(user_theme):
            user_theme_name = 'default'
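These tags now take a user object rather than a username string, and anonymous users short-circuit to the 'default' theme. A sketch of calling the tag from Python; the import path is an assumption, not confirmed by this diff, and in templates the tag is used via the loaded tag library as before:

    # Sketch only: the templatetag module path is assumed.
    from part.templatetags.inventree_extras import get_user_color_theme

    def resolve_theme(request):
        # request.user may be AnonymousUser; the tag returns 'default' in that case
        return get_user_color_theme(request.user)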
122
src/backend/InvenTree/InvenTree/test_sso.py
Normal file
@ -0,0 +1,122 @@
"""Test the sso module functionality."""

from django.contrib.auth.models import Group, User
from django.test import override_settings
from django.test.testcases import TransactionTestCase

from allauth.socialaccount.models import SocialAccount, SocialLogin

from common.models import InvenTreeSetting
from InvenTree import sso
from InvenTree.forms import RegistratonMixin
from InvenTree.unit_test import InvenTreeTestCase


class Dummy:
    """Simulate super class of RegistratonMixin."""

    def save_user(self, _request, user: User, *args) -> User:
        """This method is only used so that the super() call of RegistrationMixin does not fail."""
        return user


class MockRegistrationMixin(RegistratonMixin, Dummy):
    """Mocked implementation of the RegistrationMixin."""


class TestSsoGroupSync(TransactionTestCase):
    """Tests for the SSO group sync feature."""

    def setUp(self):
        """Construct sociallogin object for test cases."""
        # configure SSO
        InvenTreeSetting.set_setting('LOGIN_ENABLE_SSO_GROUP_SYNC', True)
        InvenTreeSetting.set_setting('SSO_GROUP_KEY', 'groups')
        InvenTreeSetting.set_setting(
            'SSO_GROUP_MAP', '{"idp_group": "inventree_group"}'
        )
        # configure sociallogin
        extra_data = {'groups': ['idp_group']}
        self.group = Group(name='inventree_group')
        self.group.save()
        # ensure default group exists
        user = User(username='testuser', first_name='Test', last_name='User')
        user.save()
        account = SocialAccount(user=user, extra_data=extra_data)
        self.sociallogin = SocialLogin(account=account)

    def test_group_added_to_user(self):
        """Check that a new SSO group is added to the user."""
        user: User = self.sociallogin.account.user
        self.assertEqual(user.groups.count(), 0)
        sso.ensure_sso_groups(None, self.sociallogin)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'inventree_group')

    def test_group_already_exists(self):
        """Check that existing SSO group is not modified."""
        user: User = self.sociallogin.account.user
        user.groups.add(self.group)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'inventree_group')
        sso.ensure_sso_groups(None, self.sociallogin)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'inventree_group')

    @override_settings(SSO_REMOVE_GROUPS=True)
    def test_remove_non_sso_group(self):
        """Check that any group not provided by IDP is removed."""
        user: User = self.sociallogin.account.user
        # group must be saved to database first
        group = Group(name='local_group')
        group.save()
        user.groups.add(group)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'local_group')
        sso.ensure_sso_groups(None, self.sociallogin)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'inventree_group')

    def test_override_default_group_with_sso_group(self):
        """The default group should be overridden if SSO groups are available."""
        user: User = self.sociallogin.account.user
        self.assertEqual(user.groups.count(), 0)
        Group(id=42, name='default_group').save()
        InvenTreeSetting.set_setting('SIGNUP_GROUP', 42)
        sso.ensure_sso_groups(None, self.sociallogin)
        MockRegistrationMixin().save_user(None, user, None)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'inventree_group')

    def test_default_group_without_sso_group(self):
        """If no SSO group is specified, the default group should be applied."""
        self.sociallogin.account.extra_data = {}
        user: User = self.sociallogin.account.user
        self.assertEqual(user.groups.count(), 0)
        Group(id=42, name='default_group').save()
        InvenTreeSetting.set_setting('SIGNUP_GROUP', 42)
        sso.ensure_sso_groups(None, self.sociallogin)
        MockRegistrationMixin().save_user(None, user, None)
        self.assertEqual(user.groups.count(), 1)
        self.assertEqual(user.groups.first().name, 'default_group')

    @override_settings(SSO_REMOVE_GROUPS=True)
    def test_remove_groups_overrides_default_group(self):
        """If no SSO group is specified, the default group should not be added if SSO_REMOVE_GROUPS=True."""
        user: User = self.sociallogin.account.user
        self.sociallogin.account.extra_data = {}
        self.assertEqual(user.groups.count(), 0)
        Group(id=42, name='default_group').save()
        InvenTreeSetting.set_setting('SIGNUP_GROUP', 42)
        sso.ensure_sso_groups(None, self.sociallogin)
        MockRegistrationMixin().save_user(None, user, None)
        # second ensure_sso_groups will be called by signal if social account changes
        sso.ensure_sso_groups(None, self.sociallogin)
        self.assertEqual(user.groups.count(), 0)

    def test_sso_group_created_if_not_exists(self):
        """If the mapped group does not exist, a new group with the same name should be created."""
        self.group.delete()
        self.assertEqual(Group.objects.filter(name='inventree_group').count(), 0)
        sso.ensure_sso_groups(None, self.sociallogin)
        self.assertEqual(Group.objects.filter(name='inventree_group').count(), 1)
@ -22,9 +22,6 @@ from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExport
import InvenTree.ready
from InvenTree.version import inventreeVersion

# Logger configuration
logger = logging.getLogger('inventree')


def setup_tracing(
    endpoint: str,
@ -46,6 +43,9 @@ def setup_tracing(
    if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
        return

    # Logger configuration
    logger = logging.getLogger('inventree')

    if resources_input is None:
        resources_input = {}
    if auth is None:
@ -84,6 +84,9 @@ def getNewestMigrationFile(app, exclude_extension=True):
            newest_num = num
            newest_file = f

    if not newest_file:  # pragma: no cover
        return newest_file

    if exclude_extension:
        newest_file = newest_file.replace('.py', '')

@ -412,12 +415,12 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
        # Extract filename
        disposition = response.headers['Content-Disposition']

        result = re.search(r'attachment; filename="([\w.]+)"', disposition)
        result = re.search(r'attachment; filename="([\w\d\-.]+)"', disposition)

        fn = result.groups()[0]

        if expected_fn is not None:
            self.assertEqual(expected_fn, fn)
            self.assertRegex(fn, expected_fn)

        if decode:
            # Decode data and return as StringIO file object
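Note the semantic shift in the last hunk: expected_fn is now treated as a regular expression (assertRegex) rather than an exact string, which lets tests tolerate variation in generated filenames. An equivalent standalone illustration:

    # Standalone illustration of the new matching semantics.
    import re

    fn = 'InvenTree_BuildOrders.csv'             # filename extracted from Content-Disposition
    expected_fn = r'InvenTree_BuildOrders\.\w+'  # callers may now pass a pattern
    assert re.search(expected_fn, fn)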
@ -21,6 +21,7 @@ from sesame.views import LoginView
import build.api
import common.api
import company.api
import importer.api
import machine.api
import order.api
import part.api
@ -80,11 +81,19 @@ admin.site.site_header = 'InvenTree Admin'

apipatterns = [
    # Global search
    path('admin/', include(common.api.admin_api_urls)),
    path('bom/', include(part.api.bom_api_urls)),
    path('build/', include(build.api.build_api_urls)),
    path('company/', include(company.api.company_api_urls)),
    path('importer/', include(importer.api.importer_api_urls)),
    path('label/', include(report.api.label_api_urls)),
    path('machine/', include(machine.api.machine_api_urls)),
    path('order/', include(order.api.order_api_urls)),
    path('part/', include(part.api.part_api_urls)),
    path('report/', include(report.api.report_api_urls)),
    path('search/', APISearchView.as_view(), name='api-search'),
    path('settings/', include(common.api.settings_api_urls)),
    path('part/', include(part.api.part_api_urls)),
    path('bom/', include(part.api.bom_api_urls)),
    path('company/', include(company.api.company_api_urls)),
    path('stock/', include(stock.api.stock_api_urls)),
    path(
        'generate/',
        include([
@ -100,14 +109,7 @@ apipatterns = [
        ),
    ]),
    ),
    path('stock/', include(stock.api.stock_api_urls)),
    path('build/', include(build.api.build_api_urls)),
    path('order/', include(order.api.order_api_urls)),
    path('label/', include(report.api.label_api_urls)),
    path('report/', include(report.api.report_api_urls)),
    path('machine/', include(machine.api.machine_api_urls)),
    path('user/', include(users.api.user_urls)),
    path('admin/', include(common.api.admin_api_urls)),
    path('web/', include(web_api_urls)),
    # Plugin endpoints
    path('', include(plugin.api.plugin_api_urls)),
@ -614,7 +614,7 @@ class AppearanceSelectView(RedirectView):
        """Get current user color theme."""
        try:
            user_theme = common_models.ColorTheme.objects.filter(
                user=self.request.user
                user_obj=self.request.user
            ).get()
        except common_models.ColorTheme.DoesNotExist:
            user_theme = None
@ -631,7 +631,7 @@ class AppearanceSelectView(RedirectView):
        # Create theme entry if user did not select one yet
        if not user_theme:
            user_theme = common_models.ColorTheme()
            user_theme.user = request.user
            user_theme.user_obj = request.user

        if theme:
            try:
@ -8,12 +8,13 @@ from django.contrib.auth.models import User

from rest_framework.exceptions import ValidationError

from django_filters.rest_framework import DjangoFilterBackend
from django_filters import rest_framework as rest_filters

from InvenTree.api import APIDownloadMixin, MetadataView
from importer.mixins import DataExportViewMixin

from InvenTree.api import BulkDeleteMixin, MetadataView
from generic.states.api import StatusView
from InvenTree.helpers import str2bool, isNull, DownloadFile
from InvenTree.helpers import str2bool, isNull
from build.status_codes import BuildStatus, BuildStatusGroups
from InvenTree.mixins import CreateAPI, RetrieveUpdateDestroyAPI, ListCreateAPI

@ -125,7 +126,7 @@ class BuildMixin:
        return queryset


class BuildList(APIDownloadMixin, BuildMixin, ListCreateAPI):
class BuildList(DataExportViewMixin, BuildMixin, ListCreateAPI):
    """API endpoint for accessing a list of Build objects.

    - GET: Return list of objects (with filters)
@ -176,15 +177,6 @@ class BuildList(APIDownloadMixin, BuildMixin, ListCreateAPI):

        return queryset

    def download_queryset(self, queryset, export_format):
        """Download the queryset data as a file."""
        dataset = build.admin.BuildResource().export(queryset=queryset)

        filedata = dataset.export(export_format)
        filename = f"InvenTree_BuildOrders.{export_format}"

        return DownloadFile(filedata, filename)

    def filter_queryset(self, queryset):
        """Custom query filtering for the BuildList endpoint."""
        queryset = super().filter_queryset(queryset)
@ -351,7 +343,7 @@ class BuildLineEndpoint:
        return queryset


class BuildLineList(BuildLineEndpoint, ListCreateAPI):
class BuildLineList(BuildLineEndpoint, DataExportViewMixin, ListCreateAPI):
    """API endpoint for accessing a list of BuildLine objects"""

    filterset_class = BuildLineFilter
@ -553,15 +545,17 @@ class BuildItemFilter(rest_filters.FilterSet):
        return queryset.filter(install_into=None)


class BuildItemList(ListCreateAPI):
class BuildItemList(DataExportViewMixin, BulkDeleteMixin, ListCreateAPI):
    """API endpoint for accessing a list of BuildItem objects.

    - GET: Return list of objects
    - POST: Create a new BuildItem object
    """

    queryset = BuildItem.objects.all()
    serializer_class = build.serializers.BuildItemSerializer
    filterset_class = BuildItemFilter
    filter_backends = SEARCH_ORDER_FILTER_ALIAS

    def get_serializer(self, *args, **kwargs):
        """Returns a BuildItemSerializer instance based on the request."""
@ -578,15 +572,20 @@ class BuildItemList(ListCreateAPI):

    def get_queryset(self):
        """Override the queryset method, to allow filtering by stock_item.part."""
        queryset = BuildItem.objects.all()
        queryset = super().get_queryset()

        queryset = queryset.select_related(
            'build_line',
            'build_line__build',
            'build_line__bom_item',
            'install_into',
            'stock_item',
            'stock_item__location',
            'stock_item__part',
            'stock_item__supplier_part',
            'stock_item__supplier_part__manufacturer_part',
        ).prefetch_related(
            'stock_item__location__tags',
        )

        return queryset
@ -609,8 +608,25 @@ class BuildItemList(ListCreateAPI):

        return queryset

    filter_backends = [
        DjangoFilterBackend,
    ordering_fields = [
        'part',
        'sku',
        'quantity',
        'location',
        'reference',
    ]

    ordering_field_aliases = {
        'part': 'stock_item__part__name',
        'sku': 'stock_item__supplier_part__SKU',
        'location': 'stock_item__location__name',
        'reference': 'build_line__bom_item__reference',
    }

    search_fields = [
        'stock_item__supplier_part__SKU',
        'stock_item__part__name',
        'build_line__bom_item__reference',
    ]

@ -104,7 +104,7 @@ class Build(
    }

    @classmethod
    def api_defaults(cls, request):
    def api_defaults(cls, request=None):
        """Return default values for this model when issuing an API OPTIONS request."""
        defaults = {
            'reference': generate_next_build_reference(),
@ -120,8 +120,32 @@ class Build(
        self.validate_reference_field(self.reference)
        self.reference_int = self.rebuild_reference_field(self.reference)

        # Check part when initially creating the build order
        if not self.pk or self.has_field_changed('part'):
            if get_global_setting('BUILDORDER_REQUIRE_VALID_BOM'):
                # Check that the BOM is valid
                if not self.part.is_bom_valid():
                    raise ValidationError({
                        'part': _('Assembly BOM has not been validated')
                    })

            if get_global_setting('BUILDORDER_REQUIRE_ACTIVE_PART'):
                # Check that the part is active
                if not self.part.active:
                    raise ValidationError({
                        'part': _('Build order cannot be created for an inactive part')
                    })

            if get_global_setting('BUILDORDER_REQUIRE_LOCKED_PART'):
                # Check that the part is locked
                if not self.part.locked:
                    raise ValidationError({
                        'part': _('Build order cannot be created for an unlocked part')
                    })

        # On first save (i.e. creation), run some extra checks
        if self.pk is None:

            # Set the destination location (if not specified)
            if not self.destination:
                self.destination = self.part.get_default_location()
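The three BUILDORDER_REQUIRE_* settings gate creation independently and all default to disabled (see the setting definitions later in this diff). A short sketch of the resulting behaviour:

    # Each gate defaults to False; enabling all three makes Build.clean() raise
    # ValidationError unless part.is_bom_valid(), part.active and part.locked hold.
    from common.settings import set_global_setting

    set_global_setting('BUILDORDER_REQUIRE_VALID_BOM', True)
    set_global_setting('BUILDORDER_REQUIRE_ACTIVE_PART', True)
    set_global_setting('BUILDORDER_REQUIRE_LOCKED_PART', True)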
@ -1,5 +1,7 @@
"""JSON serializers for Build API."""

from decimal import Decimal

from django.db import transaction
from django.core.exceptions import ValidationError as DjangoValidationError
from django.utils.translation import gettext_lazy as _
@ -25,14 +27,16 @@ from stock.serializers import StockItemSerializerBrief, LocationSerializer

import common.models
from common.serializers import ProjectCodeSerializer
from importer.mixins import DataImportExportSerializerMixin
import company.serializers
import part.filters
from part.serializers import BomItemSerializer, PartSerializer, PartBriefSerializer
import part.serializers as part_serializers
from users.serializers import OwnerSerializer

from .models import Build, BuildLine, BuildItem


class BuildSerializer(NotesFieldMixin, InvenTreeModelSerializer):
class BuildSerializer(NotesFieldMixin, DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializes a Build object."""

    class Meta:
@ -50,8 +54,10 @@ class BuildSerializer(NotesFieldMixin, InvenTreeModelSerializer):
            'destination',
            'parent',
            'part',
            'part_name',
            'part_detail',
            'project_code',
            'project_code_label',
            'project_code_detail',
            'overdue',
            'reference',
@ -82,7 +88,9 @@ class BuildSerializer(NotesFieldMixin, InvenTreeModelSerializer):

    status_text = serializers.CharField(source='get_status_display', read_only=True)

    part_detail = PartBriefSerializer(source='part', many=False, read_only=True)
    part_detail = part_serializers.PartBriefSerializer(source='part', many=False, read_only=True)

    part_name = serializers.CharField(source='part.name', read_only=True, label=_('Part Name'))

    quantity = InvenTreeDecimalField()

@ -94,6 +102,8 @@ class BuildSerializer(NotesFieldMixin, InvenTreeModelSerializer):

    barcode_hash = serializers.CharField(read_only=True)

    project_code_label = serializers.CharField(source='project_code.code', read_only=True, label=_('Project Code Label'))

    project_code_detail = ProjectCodeSerializer(source='project_code', many=False, read_only=True)

    @staticmethod
@ -124,7 +134,7 @@ class BuildSerializer(NotesFieldMixin, InvenTreeModelSerializer):
        super().__init__(*args, **kwargs)

        if part_detail is not True:
            self.fields.pop('part_detail')
            self.fields.pop('part_detail', None)

    reference = serializers.CharField(required=True)

@ -201,7 +211,7 @@ class BuildOutputQuantitySerializer(BuildOutputSerializer):
    quantity = serializers.DecimalField(
        max_digits=15,
        decimal_places=5,
        min_value=0,
        min_value=Decimal(0),
        required=True,
        label=_('Quantity'),
        help_text=_('Enter quantity for build output'),
@ -248,7 +258,7 @@ class BuildOutputCreateSerializer(serializers.Serializer):
    quantity = serializers.DecimalField(
        max_digits=15,
        decimal_places=5,
        min_value=0,
        min_value=Decimal(0),
        required=True,
        label=_('Quantity'),
        help_text=_('Enter quantity for build output'),
@ -856,7 +866,7 @@ class BuildAllocationItemSerializer(serializers.Serializer):
    quantity = serializers.DecimalField(
        max_digits=15,
        decimal_places=5,
        min_value=0,
        min_value=Decimal(0),
        required=True
    )

@ -1049,8 +1059,22 @@ class BuildAutoAllocationSerializer(serializers.Serializer):
        raise ValidationError(_("Failed to start auto-allocation task"))


class BuildItemSerializer(InvenTreeModelSerializer):
    """Serializes a BuildItem object."""
class BuildItemSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializes a BuildItem object, which is an allocation of a stock item against a build order."""

    # These fields are only used for data export
    export_only_fields = [
        'build_reference',
        'sku',
        'mpn',
        'location_name',
        'part_id',
        'part_name',
        'part_ipn',
        'available_quantity',
        'item_batch_code',
        'item_serial',
    ]

    class Meta:
        """Serializer metaclass"""
@ -1062,23 +1086,29 @@ class BuildItemSerializer(InvenTreeModelSerializer):
            'install_into',
            'stock_item',
            'quantity',
            'location',

            # Detail fields, can be included or excluded
            'build_detail',
            'location_detail',
            'part_detail',
            'stock_item_detail',
            'build_detail',
            'supplier_part_detail',

            # The following fields are only used for data export
            'bom_reference',
            'build_reference',
            'location_name',
            'mpn',
            'sku',
            'part_id',
            'part_name',
            'part_ipn',
            'available_quantity',
            'item_batch_code',
            'item_serial_number',
        ]

    # Annotated fields
    build = serializers.PrimaryKeyRelatedField(source='build_line.build', many=False, read_only=True)

    # Extra (optional) detail fields
    part_detail = PartBriefSerializer(source='stock_item.part', many=False, read_only=True, pricing=False)
    stock_item_detail = StockItemSerializerBrief(source='stock_item', read_only=True)
    location_detail = LocationSerializer(source='stock_item.location', read_only=True)
    build_detail = BuildSerializer(source='build_line.build', many=False, read_only=True)

    quantity = InvenTreeDecimalField()

    def __init__(self, *args, **kwargs):
        """Determine which extra details fields should be included"""
        part_detail = kwargs.pop('part_detail', True)
@ -1089,21 +1119,59 @@ class BuildItemSerializer(InvenTreeModelSerializer):
        super().__init__(*args, **kwargs)

        if not part_detail:
            self.fields.pop('part_detail')
            self.fields.pop('part_detail', None)

        if not location_detail:
            self.fields.pop('location_detail')
            self.fields.pop('location_detail', None)

        if not stock_detail:
            self.fields.pop('stock_item_detail')
            self.fields.pop('stock_item_detail', None)

        if not build_detail:
            self.fields.pop('build_detail')
            self.fields.pop('build_detail', None)

    # Export-only fields
    sku = serializers.CharField(source='stock_item.supplier_part.SKU', label=_('Supplier Part Number'), read_only=True)
    mpn = serializers.CharField(source='stock_item.supplier_part.manufacturer_part.MPN', label=_('Manufacturer Part Number'), read_only=True)
    location_name = serializers.CharField(source='stock_item.location.name', label=_('Location Name'), read_only=True)
    build_reference = serializers.CharField(source='build.reference', label=_('Build Reference'), read_only=True)
    bom_reference = serializers.CharField(source='build_line.bom_item.reference', label=_('BOM Reference'), read_only=True)

    # Part detail fields
    part_id = serializers.PrimaryKeyRelatedField(source='stock_item.part', label=_('Part ID'), many=False, read_only=True)
    part_name = serializers.CharField(source='stock_item.part.name', label=_('Part Name'), read_only=True)
    part_ipn = serializers.CharField(source='stock_item.part.IPN', label=_('Part IPN'), read_only=True)

    item_batch_code = serializers.CharField(source='stock_item.batch', label=_('Batch Code'), read_only=True)
    item_serial_number = serializers.CharField(source='stock_item.serial', label=_('Serial Number'), read_only=True)

    # Annotated fields
    build = serializers.PrimaryKeyRelatedField(source='build_line.build', many=False, read_only=True)

    # Extra (optional) detail fields
    part_detail = part_serializers.PartBriefSerializer(source='stock_item.part', many=False, read_only=True, pricing=False)
    stock_item_detail = StockItemSerializerBrief(source='stock_item', read_only=True)
    location = serializers.PrimaryKeyRelatedField(source='stock_item.location', many=False, read_only=True)
    location_detail = LocationSerializer(source='stock_item.location', read_only=True)
    build_detail = BuildSerializer(source='build_line.build', many=False, read_only=True)
    supplier_part_detail = company.serializers.SupplierPartSerializer(source='stock_item.supplier_part', many=False, read_only=True)

    quantity = InvenTreeDecimalField(label=_('Allocated Quantity'))
    available_quantity = InvenTreeDecimalField(source='stock_item.quantity', read_only=True, label=_('Available Quantity'))


class BuildLineSerializer(InvenTreeModelSerializer):
class BuildLineSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializer for a BuildItem object."""

    export_exclude_fields = [
        'allocations',
    ]

    export_only_fields = [
        'part_description',
        'part_category_name',
    ]

    class Meta:
        """Serializer metaclass"""

@ -1117,6 +1185,20 @@ class BuildLineSerializer(InvenTreeModelSerializer):
            'quantity',
            'allocations',

            # BOM item detail fields
            'reference',
            'consumable',
            'optional',
            'trackable',
            'inherited',
            'allow_variants',

            # Part detail fields
            'part',
            'part_name',
            'part_IPN',
            'part_category_id',

            # Annotated fields
            'allocated',
            'in_production',
@ -1126,6 +1208,10 @@ class BuildLineSerializer(InvenTreeModelSerializer):
            'available_variant_stock',
            'total_available_stock',
            'external_stock',

            # Extra fields only for data export
            'part_description',
            'part_category_name',
        ]

        read_only_fields = [
@ -1134,13 +1220,30 @@ class BuildLineSerializer(InvenTreeModelSerializer):
            'allocations',
        ]

    quantity = serializers.FloatField()
    # Part info fields
    part = serializers.PrimaryKeyRelatedField(source='bom_item.sub_part', label=_('Part'), many=False, read_only=True)
    part_name = serializers.CharField(source='bom_item.sub_part.name', label=_('Part Name'), read_only=True)
    part_IPN = serializers.CharField(source='bom_item.sub_part.IPN', label=_('Part IPN'), read_only=True)

    part_description = serializers.CharField(source='bom_item.sub_part.description', label=_('Part Description'), read_only=True)
    part_category_id = serializers.PrimaryKeyRelatedField(source='bom_item.sub_part.category', label=_('Part Category ID'), read_only=True)
    part_category_name = serializers.CharField(source='bom_item.sub_part.category.name', label=_('Part Category Name'), read_only=True)

    # BOM item info fields
    reference = serializers.CharField(source='bom_item.reference', label=_('Reference'), read_only=True)
    consumable = serializers.BooleanField(source='bom_item.consumable', label=_('Consumable'), read_only=True)
    optional = serializers.BooleanField(source='bom_item.optional', label=_('Optional'), read_only=True)
    trackable = serializers.BooleanField(source='bom_item.sub_part.trackable', label=_('Trackable'), read_only=True)
    inherited = serializers.BooleanField(source='bom_item.inherited', label=_('Inherited'), read_only=True)
    allow_variants = serializers.BooleanField(source='bom_item.allow_variants', label=_('Allow Variants'), read_only=True)

    quantity = serializers.FloatField(label=_('Quantity'))

    bom_item = serializers.PrimaryKeyRelatedField(label=_('BOM Item'), read_only=True)

    # Foreign key fields
    bom_item_detail = BomItemSerializer(source='bom_item', many=False, read_only=True, pricing=False)
    part_detail = PartSerializer(source='bom_item.sub_part', many=False, read_only=True, pricing=False)
    bom_item_detail = part_serializers.BomItemSerializer(source='bom_item', many=False, read_only=True, pricing=False)
    part_detail = part_serializers.PartSerializer(source='bom_item.sub_part', many=False, read_only=True, pricing=False)
    allocations = BuildItemSerializer(many=True, read_only=True)

    # Annotated (calculated) fields
@ -1164,10 +1267,10 @@ class BuildLineSerializer(InvenTreeModelSerializer):
        read_only=True
    )

    available_substitute_stock = serializers.FloatField(read_only=True)
    available_variant_stock = serializers.FloatField(read_only=True)
    total_available_stock = serializers.FloatField(read_only=True)
    external_stock = serializers.FloatField(read_only=True)
    available_substitute_stock = serializers.FloatField(read_only=True, label=_('Available Substitute Stock'))
    available_variant_stock = serializers.FloatField(read_only=True, label=_('Available Variant Stock'))
    total_available_stock = serializers.FloatField(read_only=True, label=_('Total Available Stock'))
    external_stock = serializers.FloatField(read_only=True, label=_('External Stock'))

    @staticmethod
    def annotate_queryset(queryset, build=None):
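On the export_only_fields / export_exclude_fields conventions used in these serializers: export-only fields appear only in generated files, while excluded fields (such as the nested allocations list) are dropped from exports. A simplified, self-contained illustration of that pattern follows; it is not the importer app's actual DataImportExportSerializerMixin, which is more involved:

    # Simplified illustration only; not the importer app's implementation.
    class ExportAwareSerializer:
        export_only_fields: list = []     # present only when writing an export file
        export_exclude_fields: list = []  # removed when writing an export file

        def __init__(self, fields: dict, exporting: bool = False):
            self.fields = dict(fields)
            hidden = self.export_exclude_fields if exporting else self.export_only_fields
            for name in hidden:
                self.fields.pop(name, None)

    class BuildItemSketch(ExportAwareSerializer):
        export_only_fields = ['sku']
        export_exclude_fields = ['allocations']

    api = BuildItemSketch({'quantity': 1, 'sku': 'ABC', 'allocations': []})
    assert 'sku' not in api.fields             # hidden from normal API responses
    export = BuildItemSketch({'quantity': 1, 'sku': 'ABC', 'allocations': []}, exporting=True)
    assert 'allocations' not in export.fields  # excluded from file exports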
@ -174,7 +174,7 @@
<div class='panel panel-hidden' id='panel-allocate'>
    <div class='panel-heading'>
        <div class='d-flex flex-wrap'>
            <h4>{% trans "Allocate Stock to Build" %}</h4>
            <h4>{% trans "Build Order Line Items" %}</h4>
            {% include "spacer.html" %}
            <div class='btn-group' role='group'>
                {% if roles.build.add and build.active %}
@ -231,6 +231,18 @@
    </div>
</div>

<div class='panel panel-hidden' id='panel-allocated'>
    <div class='panel-heading'>
        <h4>{% trans "Allocated Stock" %}</h4>
    </div>
    <div class='panel-content'>
        <div id='build-allocated-stock-toolbar'>
            {% include "filter_list.html" with id='buildorderallocatedstock' %}
        </div>
        <table class='table table-striped table-condensed' id='allocated-stock-table' data-toolbar='#build-allocated-stock-toolbar'></table>
    </div>
</div>

<div class='panel panel-hidden' id='panel-consumed'>
    <div class='panel-heading'>
        <h4>
@ -290,6 +302,10 @@
{% block js_ready %}
{{ block.super }}

onPanelLoad('allocated', function() {
    loadBuildOrderAllocatedStockTable($('#allocated-stock-table'), {{ build.pk }});
});

onPanelLoad('consumed', function() {
    loadStockTable($('#consumed-stock-table'), {
        filterTarget: '#filter-list-consumed-stock',
@ -5,15 +5,19 @@
{% trans "Build Order Details" as text %}
{% include "sidebar_item.html" with label='details' text=text icon="fa-info-circle" %}
{% if build.is_active %}
{% trans "Allocate Stock" as text %}
{% include "sidebar_item.html" with label='allocate' text=text icon="fa-tasks" %}
{% trans "Line Items" as text %}
{% include "sidebar_item.html" with label='allocate' text=text icon="fa-list-ol" %}
{% trans "Incomplete Outputs" as text %}
{% include "sidebar_item.html" with label='outputs' text=text icon="fa-tools" %}
{% endif %}
{% trans "Completed Outputs" as text %}
{% include "sidebar_item.html" with label='completed' text=text icon="fa-boxes" %}
{% if build.is_active %}
{% trans "Allocated Stock" as text %}
{% include "sidebar_item.html" with label='allocated' text=text icon="fa-list" %}
{% endif %}
{% trans "Consumed Stock" as text %}
{% include "sidebar_item.html" with label='consumed' text=text icon="fa-list" %}
{% include "sidebar_item.html" with label='consumed' text=text icon="fa-tasks" %}
{% trans "Child Build Orders" as text %}
{% include "sidebar_item.html" with label='children' text=text icon="fa-sitemap" %}
{% trans "Attachments" as text %}
@ -564,16 +564,16 @@ class BuildTest(BuildAPITest):
    def test_download_build_orders(self):
        """Test that we can download a list of build orders via the API"""
        required_cols = [
            'reference',
            'status',
            'completed',
            'batch',
            'notes',
            'title',
            'part',
            'part_name',
            'id',
            'quantity',
            'Reference',
            'Build Status',
            'Completed items',
            'Batch Code',
            'Notes',
            'Description',
            'Part',
            'Part Name',
            'ID',
            'Quantity',
        ]

        excluded_cols = [
@ -597,13 +597,13 @@ class BuildTest(BuildAPITest):

        for row in data:

            build = Build.objects.get(pk=row['id'])
            build = Build.objects.get(pk=row['ID'])

            self.assertEqual(str(build.part.pk), row['part'])
            self.assertEqual(build.part.full_name, row['part_name'])
            self.assertEqual(str(build.part.pk), row['Part'])
            self.assertEqual(build.part.name, row['Part Name'])

            self.assertEqual(build.reference, row['reference'])
            self.assertEqual(build.title, row['title'])
            self.assertEqual(build.reference, row['Reference'])
            self.assertEqual(build.title, row['Description'])


class BuildAllocationTest(BuildAPITest):
@ -1,6 +1,7 @@
"""Basic unit tests for the BuildOrder app"""

from django.conf import settings
from django.core.exceptions import ValidationError
from django.test import tag
from django.urls import reverse

@ -9,8 +10,10 @@ from datetime import datetime, timedelta
from InvenTree.unit_test import InvenTreeTestCase

from .models import Build
from part.models import Part, BomItem
from stock.models import StockItem

from common.settings import get_global_setting, set_global_setting
from build.status_codes import BuildStatus


@ -88,6 +91,79 @@ class BuildTestSimple(InvenTreeTestCase):

        self.assertEqual(build.status, BuildStatus.CANCELLED)

    def test_build_create(self):
        """Test creation of build orders via API."""

        n = Build.objects.count()

        # Find an assembly part
        assembly = Part.objects.filter(assembly=True).first()

        assembly.active = True
        assembly.locked = False
        assembly.save()

        self.assertEqual(assembly.get_bom_items().count(), 0)

        # Let's create some BOM items for this assembly
        for component in Part.objects.filter(assembly=False, component=True)[:15]:

            try:
                BomItem.objects.create(
                    part=assembly,
                    sub_part=component,
                    reference='xxx',
                    quantity=5
                )
            except ValidationError:
                pass

        # The assembly has a BOM, and is now *invalid*
        self.assertGreater(assembly.get_bom_items().count(), 0)
        self.assertFalse(assembly.is_bom_valid())

        # Create a build for an assembly with an *invalid* BOM
        set_global_setting('BUILDORDER_REQUIRE_VALID_BOM', False)
        set_global_setting('BUILDORDER_REQUIRE_ACTIVE_PART', True)
        set_global_setting('BUILDORDER_REQUIRE_LOCKED_PART', False)

        bo = Build.objects.create(part=assembly, quantity=10, reference='BO-9990')
        bo.save()

        # Now, require a *valid* BOM
        set_global_setting('BUILDORDER_REQUIRE_VALID_BOM', True)

        with self.assertRaises(ValidationError):
            bo = Build.objects.create(part=assembly, quantity=10, reference='BO-9991')

        # Now, validate the BOM, and try again
        assembly.validate_bom(None)
        self.assertTrue(assembly.is_bom_valid())

        bo = Build.objects.create(part=assembly, quantity=10, reference='BO-9992')

        # Now, try and create a build for an inactive assembly
        assembly.active = False
        assembly.save()

        with self.assertRaises(ValidationError):
            bo = Build.objects.create(part=assembly, quantity=10, reference='BO-9993')

        set_global_setting('BUILDORDER_REQUIRE_ACTIVE_PART', False)
        Build.objects.create(part=assembly, quantity=10, reference='BO-9994')

        # Check that the "locked" requirement works
        set_global_setting('BUILDORDER_REQUIRE_LOCKED_PART', True)
        with self.assertRaises(ValidationError):
            Build.objects.create(part=assembly, quantity=10, reference='BO-9995')

        assembly.locked = True
        assembly.save()

        Build.objects.create(part=assembly, quantity=10, reference='BO-9996')

        # Check that expected quantity of new builds is created
        self.assertEqual(Build.objects.count(), n + 4)


class TestBuildViews(InvenTreeTestCase):
    """Tests for Build app views."""

@ -27,6 +27,7 @@ import common.models
import common.serializers
from common.settings import get_global_setting
from generic.states.api import AllStatusViews, StatusView
from importer.mixins import DataExportViewMixin
from InvenTree.api import BulkDeleteMixin, MetadataView
from InvenTree.config import CONFIG_LOOKUPS
from InvenTree.filters import ORDER_FILTER, SEARCH_ORDER_FILTER
@ -494,7 +495,7 @@ class NotesImageList(ListCreateAPI):
        image.save()


class ProjectCodeList(ListCreateAPI):
class ProjectCodeList(DataExportViewMixin, ListCreateAPI):
    """List view for all project codes."""

    queryset = common.models.ProjectCode.objects.all()
@ -515,7 +516,7 @@ class ProjectCodeDetail(RetrieveUpdateDestroyAPI):
    permission_classes = [permissions.IsAuthenticated, IsStaffOrReadOnly]


class CustomUnitList(ListCreateAPI):
class CustomUnitList(DataExportViewMixin, ListCreateAPI):
    """List view for custom units."""

    queryset = common.models.CustomUnit.objects.all()
@ -949,7 +950,7 @@ common_api_urls = [
        '<int:pk>/', ContentTypeDetail.as_view(), name='api-contenttype-detail'
    ),
    path(
        '<str:model>/',
        'model/<str:model>/',
        ContentTypeModelDetail.as_view(),
        name='api-contenttype-detail-modelname',
    ),
@ -59,7 +59,9 @@ def currency_codes() -> list:
    """Returns the current currency codes."""
    from common.settings import get_global_setting

    codes = get_global_setting('CURRENCY_CODES', create=False).strip()
    codes = get_global_setting(
        'CURRENCY_CODES', create=False, enviroment_key='INVENTREE_CURRENCY_CODES'
    ).strip()

    if not codes:
        codes = currency_codes_default_list()
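With the enviroment_key argument (spelled that way in the code base), the currency code list can also be supplied via the INVENTREE_CURRENCY_CODES environment variable; the exact precedence between the database value and the environment is decided inside get_global_setting. An illustrative deployment-side sketch:

    # Illustrative only: a comma-separated list, set before the server starts.
    import os

    os.environ['INVENTREE_CURRENCY_CODES'] = 'AUD,EUR,GBP,USD'
    # currency_codes() strips this value; an empty result falls back
    # to currency_codes_default_list().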
@ -51,6 +51,9 @@ class MatchFieldForm(forms.Form):

        super().__init__(*args, **kwargs)

        if not file_manager:  # pragma: no cover
            return

        # Setup FileManager
        file_manager.setup()
        # Get columns
@ -87,6 +90,9 @@ class MatchItemForm(forms.Form):

        super().__init__(*args, **kwargs)

        if not file_manager:  # pragma: no cover
            return

        # Setup FileManager
        file_manager.setup()

@ -17,5 +17,8 @@ class Migration(migrations.Migration):
                ('code', models.CharField(help_text='Unique project code', max_length=50, unique=True, verbose_name='Project Code')),
                ('description', models.CharField(blank=True, help_text='Project description', max_length=200, verbose_name='Description')),
            ],
            options={
                'verbose_name': 'Project Code',
            },
        ),
    ]
@ -18,5 +18,8 @@ class Migration(migrations.Migration):
                ('symbol', models.CharField(blank=True, help_text='Optional unit symbol', max_length=10, unique=True, verbose_name='Symbol')),
                ('definition', models.CharField(help_text='Unit definition', max_length=50, verbose_name='Definition')),
            ],
            options={
                'verbose_name': 'Custom Unit',
            },
        ),
    ]
@ -1,5 +1,6 @@
# Generated by Django 4.2.12 on 2024-06-02 13:32

from django.conf import settings
from django.db import migrations

from moneyed import CURRENCIES

@ -47,16 +48,20 @@ def set_currencies(apps, schema_editor):
        return

    value = ','.join(valid_codes)
    print(f"Found existing currency codes:", value)

    if not settings.TESTING:
        print(f"Found existing currency codes:", value)

    setting = InvenTreeSetting.objects.filter(key=key).first()

    if setting:
        print(f"- Updating existing setting for currency codes")
        if not settings.TESTING:
            print(f"- Updating existing setting for currency codes")
        setting.value = value
        setting.save()
    else:
        print(f"- Creating new setting for currency codes")
        if not settings.TESTING:
            print(f"- Creating new setting for currency codes")
        setting = InvenTreeSetting(key=key, value=value)
        setting.save()

@ -0,0 +1,18 @@
# Generated by Django 4.2.12 on 2024-07-04 10:32

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('common', '0026_auto_20240608_1238'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customunit',
            name='symbol',
            field=models.CharField(blank=True, help_text='Optional unit symbol', max_length=10, verbose_name='Symbol'),
        ),
    ]
@ -0,0 +1,39 @@
# Generated by Django 4.2.12 on 2024-07-04 10:23

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


def migrate_userthemes(apps, schema_editor):
    """Migrate text-based user references to ForeignKey references."""
    ColorTheme = apps.get_model("common", "ColorTheme")
    User = apps.get_model(settings.AUTH_USER_MODEL)

    for theme in ColorTheme.objects.all():
        try:
            theme.user_obj = User.objects.get(username=theme.user)
            theme.save()
        except User.DoesNotExist:
            pass


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("common", "0027_alter_customunit_symbol"),
    ]

    operations = [
        migrations.AddField(
            model_name="colortheme",
            name="user_obj",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.RunPython(migrate_userthemes, migrations.RunPython.noop),
    ]
@ -14,7 +14,7 @@ from datetime import timedelta, timezone
from enum import Enum
from io import BytesIO
from secrets import compare_digest
from typing import Any, Callable, TypedDict, Union
from typing import Any, Callable, Collection, TypedDict, Union

from django.apps import apps
from django.conf import settings as django_settings

@ -116,6 +116,11 @@ class BaseURLValidator(URLValidator):
class ProjectCode(InvenTree.models.InvenTreeMetadataModel):
    """A ProjectCode is a unique identifier for a project."""

    class Meta:
        """Class options for the ProjectCode model."""

        verbose_name = _('Project Code')

    @staticmethod
    def get_api_url():
        """Return the API URL for this model."""

@ -1391,12 +1396,24 @@ class InvenTreeSetting(BaseInvenTreeSetting):
            'default': True,
            'validator': bool,
        },
        'BARCODE_SHOW_TEXT': {
            'name': _('Barcode Show Data'),
            'description': _('Display barcode data in browser as text'),
            'default': False,
            'validator': bool,
        },
        'PART_ENABLE_REVISION': {
            'name': _('Part Revisions'),
            'description': _('Enable revision field for Part'),
            'validator': bool,
            'default': True,
        },
        'PART_REVISION_ASSEMBLY_ONLY': {
            'name': _('Assembly Revision Only'),
            'description': _('Only allow revisions for assembly parts'),
            'validator': bool,
            'default': False,
        },
        'PART_ALLOW_DELETE_FROM_ASSEMBLY': {
            'name': _('Allow Deletion from Assembly'),
            'description': _('Allow deletion of parts which are used in an assembly'),

@ -1780,6 +1797,26 @@ class InvenTreeSetting(BaseInvenTreeSetting):
            'default': False,
            'validator': bool,
        },
        'BUILDORDER_REQUIRE_ACTIVE_PART': {
            'name': _('Require Active Part'),
            'description': _('Prevent build order creation for inactive parts'),
            'default': False,
            'validator': bool,
        },
        'BUILDORDER_REQUIRE_LOCKED_PART': {
            'name': _('Require Locked Part'),
            'description': _('Prevent build order creation for unlocked parts'),
            'default': False,
            'validator': bool,
        },
        'BUILDORDER_REQUIRE_VALID_BOM': {
            'name': _('Require Valid BOM'),
            'description': _(
                'Prevent build order creation unless BOM has been validated'
            ),
            'default': False,
            'validator': bool,
        },
        'PREVENT_BUILD_COMPLETION_HAVING_INCOMPLETED_TESTS': {
            'name': _('Block Until Tests Pass'),
            'description': _(

@ -1909,6 +1946,38 @@ class InvenTreeSetting(BaseInvenTreeSetting):
            'default': False,
            'validator': bool,
        },
        'LOGIN_ENABLE_SSO_GROUP_SYNC': {
            'name': _('Enable SSO group sync'),
            'description': _(
                'Enable synchronizing InvenTree groups with groups provided by the IdP'
            ),
            'default': False,
            'validator': bool,
        },
        'SSO_GROUP_KEY': {
            'name': _('SSO group key'),
            'description': _(
                'The name of the groups claim attribute provided by the IdP'
            ),
            'default': 'groups',
            'validator': str,
        },
        'SSO_GROUP_MAP': {
            'name': _('SSO group map'),
            'description': _(
                'A mapping from SSO groups to local InvenTree groups. If the local group does not exist, it will be created.'
            ),
            'validator': json.loads,
            'default': '{}',
        },
        'SSO_REMOVE_GROUPS': {
            'name': _('Remove groups outside of SSO'),
            'description': _(
                'Whether groups assigned to the user should be removed if they are not backed by the IdP. Disabling this setting might cause security issues'
            ),
            'default': True,
            'validator': bool,
        },
        'LOGIN_MAIL_REQUIRED': {
            'name': _('Email required'),
            'description': _('Require user to supply mail on signup'),

@ -1945,7 +2014,9 @@ class InvenTreeSetting(BaseInvenTreeSetting):
        },
        'SIGNUP_GROUP': {
            'name': _('Group on signup'),
            'description': _('Group to which new users are assigned on registration'),
            'description': _(
                'Group to which new users are assigned on registration. If SSO group sync is enabled, this group is only set if no group can be assigned from the IdP.'
            ),
            'default': '',
            'choices': settings_group_options,
        },
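Together, the four SSO settings above form one pipeline: LOGIN_ENABLE_SSO_GROUP_SYNC turns group sync on, SSO_GROUP_KEY names the IdP claim to read, SSO_GROUP_MAP translates IdP group names into local InvenTree groups, and SSO_REMOVE_GROUPS decides whether memberships not backed by the IdP are pruned. A sketch of what a mapping value might look like (the group names are hypothetical; the value is stored as a JSON string, matching the json.loads validator above):

import json

# Hypothetical IdP-to-InvenTree group mapping for SSO_GROUP_MAP
sso_group_map = json.dumps({
    'idp-engineering': 'engineering',
    'idp-warehouse': 'stock-managers',
})

# The stored string must survive the validator round-trip
assert json.loads(sso_group_map)['idp-engineering'] == 'engineering'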
@ -2426,36 +2497,6 @@ class InvenTreeUserSetting(BaseInvenTreeSetting):
            'validator': [int, MinValueValidator(0)],
            'default': 100,
        },
        'DEFAULT_PART_LABEL_TEMPLATE': {
            'name': _('Default part label template'),
            'description': _('The part label template to be automatically selected'),
            'validator': [int],
            'default': '',
        },
        'DEFAULT_ITEM_LABEL_TEMPLATE': {
            'name': _('Default stock item template'),
            'description': _(
                'The stock item label template to be automatically selected'
            ),
            'validator': [int],
            'default': '',
        },
        'DEFAULT_LOCATION_LABEL_TEMPLATE': {
            'name': _('Default stock location label template'),
            'description': _(
                'The stock location label template to be automatically selected'
            ),
            'validator': [int],
            'default': '',
        },
        'DEFAULT_LINE_LABEL_TEMPLATE': {
            'name': _('Default build line label template'),
            'description': _(
                'The build line label template to be automatically selected'
            ),
            'validator': [int],
            'default': '',
        },
        'NOTIFICATION_ERROR_REPORT': {
            'name': _('Receive error reports'),
            'description': _('Receive notifications for system errors'),

@ -2543,6 +2584,7 @@ class ColorTheme(models.Model):
    name = models.CharField(max_length=20, default='', blank=True)

    user = models.CharField(max_length=150, unique=True)
    user_obj = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True)

    @classmethod
    def get_color_themes_choices(cls):

@ -2993,6 +3035,11 @@ class CustomUnit(models.Model):
    https://pint.readthedocs.io/en/stable/advanced/defining.html
    """

    class Meta:
        """Class meta options."""

        verbose_name = _('Custom Unit')

    def fmt_string(self):
        """Construct a unit definition string e.g. 'dog_year = 52 * day = dy'."""
        fmt = f'{self.name} = {self.definition}'

@ -3002,6 +3049,18 @@ class CustomUnit(models.Model):

        return fmt

    def validate_unique(self, exclude=None) -> None:
        """Ensure that the custom unit is unique."""
        super().validate_unique(exclude)

        if self.symbol:
            if (
                CustomUnit.objects.filter(symbol=self.symbol)
                .exclude(pk=self.pk)
                .exists()
            ):
                raise ValidationError({'symbol': _('Unit symbol must be unique')})

    def clean(self):
        """Validate that the provided custom unit is indeed valid."""
        super().clean()

@ -3043,7 +3102,6 @@ class CustomUnit(models.Model):
        max_length=10,
        verbose_name=_('Symbol'),
        help_text=_('Optional unit symbol'),
        unique=True,
        blank=True,
    )
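The CustomUnit hunks above work as a pair: the database-level unique=True on the symbol field (dropped by migration 0027 and in the field definition above) is replaced by the application-level validate_unique() check, so any number of units may leave the optional symbol blank while non-blank symbols must remain distinct. A rough sketch of the intended behaviour (assuming a configured Django environment; this is illustrative, not a test from this diff):

from common.models import CustomUnit

# Blank symbols no longer collide: the uniqueness check in
# validate_unique() only runs when a symbol is actually set
CustomUnit.objects.create(name='dog_year', definition='52 * day', symbol='')
CustomUnit.objects.create(name='cat_year', definition='60 * day', symbol='')

CustomUnit.objects.create(name='puppy_year', definition='7 * day', symbol='dy')
# A second unit with symbol='dy' would now fail full_clean() with
# ValidationError: {'symbol': 'Unit symbol must be unique'}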
@ -14,6 +14,8 @@ from taggit.serializers import TagListSerializerField

import common.models as common_models
import common.validators
from importer.mixins import DataImportExportSerializerMixin
from importer.registry import register_importer
from InvenTree.helpers import get_objectreference
from InvenTree.helpers_model import construct_absolute_url
from InvenTree.serializers import (

@ -293,7 +295,8 @@ class NotesImageSerializer(InvenTreeModelSerializer):
    image = InvenTreeImageSerializerField(required=True)


class ProjectCodeSerializer(InvenTreeModelSerializer):
@register_importer()
class ProjectCodeSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializer for the ProjectCode model."""

    class Meta:

@ -341,7 +344,8 @@ class ContentTypeSerializer(serializers.Serializer):
        return obj.app_label in plugin_registry.installed_apps


class CustomUnitSerializer(InvenTreeModelSerializer):
@register_importer()
class CustomUnitSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """DRF serializer for CustomUnit model."""

    class Meta:

@ -1,10 +1,17 @@
"""User-configurable settings for the common app."""

from os import environ

def get_global_setting(key, backup_value=None, **kwargs):

def get_global_setting(key, backup_value=None, environment_key=None, **kwargs):
    """Return the value of a global setting using the provided key."""
    from common.models import InvenTreeSetting

    if environment_key:
        value = environ.get(environment_key)
        if value:
            return value

    if backup_value is not None:
        kwargs['backup_value'] = backup_value
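The new environment_key argument gives environment variables precedence over the database-backed setting. A minimal sketch of the resulting behaviour (assuming a configured Django environment; the exported value is hypothetical):

import os

from common.settings import get_global_setting

# Hypothetical: an operator pins the currency list before server start
os.environ['INVENTREE_CURRENCY_CODES'] = 'USD,EUR,NZD'

# The environment value short-circuits the InvenTreeSetting lookup entirely
codes = get_global_setting(
    'CURRENCY_CODES', create=False, environment_key='INVENTREE_CURRENCY_CODES'
)
assert codes == 'USD,EUR,NZD'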
@ -1376,7 +1376,7 @@ class ProjectCodesTest(InvenTreeAPITestCase):
        )

        self.assertIn(
            'project code with this Project Code already exists',
            'Project Code with this Project Code already exists',
            str(response.data['code']),
        )

@ -6,6 +6,8 @@ from import_export import widgets
from import_export.admin import ImportExportModelAdmin
from import_export.fields import Field

import company.serializers
import importer.admin
from InvenTree.admin import InvenTreeResource
from part.models import Part

@ -33,9 +35,10 @@ class CompanyResource(InvenTreeResource):


@admin.register(Company)
class CompanyAdmin(ImportExportModelAdmin):
class CompanyAdmin(importer.admin.DataExportAdmin, ImportExportModelAdmin):
    """Admin class for the Company model."""

    serializer_class = company.serializers.CompanySerializer
    resource_class = CompanyResource

    list_display = ('name', 'website', 'contact')
@ -7,12 +7,9 @@ from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as rest_filters

import part.models
from importer.mixins import DataExportViewMixin
from InvenTree.api import ListCreateDestroyAPIView, MetadataView
from InvenTree.filters import (
    ORDER_FILTER,
    SEARCH_ORDER_FILTER,
    SEARCH_ORDER_FILTER_ALIAS,
)
from InvenTree.filters import SEARCH_ORDER_FILTER, SEARCH_ORDER_FILTER_ALIAS
from InvenTree.helpers import str2bool
from InvenTree.mixins import ListCreateAPI, RetrieveUpdateDestroyAPI

@ -36,7 +33,7 @@ from .serializers import (
)


class CompanyList(ListCreateAPI):
class CompanyList(DataExportViewMixin, ListCreateAPI):
    """API endpoint for accessing a list of Company objects.

    Provides two methods:

@ -84,7 +81,7 @@ class CompanyDetail(RetrieveUpdateDestroyAPI):
        return queryset


class ContactList(ListCreateDestroyAPIView):
class ContactList(DataExportViewMixin, ListCreateDestroyAPIView):
    """API endpoint for list view of Contact model."""

    queryset = Contact.objects.all()

@ -108,7 +105,7 @@ class ContactDetail(RetrieveUpdateDestroyAPI):
    serializer_class = ContactSerializer


class AddressList(ListCreateDestroyAPIView):
class AddressList(DataExportViewMixin, ListCreateDestroyAPIView):
    """API endpoint for list view of Address model."""

    queryset = Address.objects.all()

@ -149,7 +146,7 @@ class ManufacturerPartFilter(rest_filters.FilterSet):
    )


class ManufacturerPartList(ListCreateDestroyAPIView):
class ManufacturerPartList(DataExportViewMixin, ListCreateDestroyAPIView):
    """API endpoint for list view of ManufacturerPart object.

    - GET: Return list of ManufacturerPart objects

@ -297,7 +294,7 @@ class SupplierPartFilter(rest_filters.FilterSet):
    )


class SupplierPartList(ListCreateDestroyAPIView):
class SupplierPartList(DataExportViewMixin, ListCreateDestroyAPIView):
    """API endpoint for list view of SupplierPart object.

    - GET: Return list of SupplierPart objects
@ -44,6 +44,9 @@ class Migration(migrations.Migration):
                ('email', models.EmailField(blank=True, max_length=254)),
                ('role', models.CharField(blank=True, max_length=100)),
            ],
            options={
                'verbose_name': 'Contact',
            }
        ),
        migrations.CreateModel(
            name='SupplierPart',

@ -75,6 +78,7 @@ class Migration(migrations.Migration):
                ('part', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pricebreaks', to='company.SupplierPart')),
            ],
            options={
                'verbose_name': 'Supplier Price Break',
                'db_table': 'part_supplierpricebreak',
            },
        ),

@ -23,17 +23,17 @@ class Migration(migrations.Migration):
        migrations.AlterField(
            model_name='company',
            name='is_customer',
            field=models.BooleanField(default=False, help_text='Do you sell items to this company?', verbose_name='is customer'),
            field=models.BooleanField(default=False, help_text='Do you sell items to this company?', verbose_name='Is customer'),
        ),
        migrations.AlterField(
            model_name='company',
            name='is_manufacturer',
            field=models.BooleanField(default=False, help_text='Does this company manufacture parts?', verbose_name='is manufacturer'),
            field=models.BooleanField(default=False, help_text='Does this company manufacture parts?', verbose_name='Is manufacturer'),
        ),
        migrations.AlterField(
            model_name='company',
            name='is_supplier',
            field=models.BooleanField(default=True, help_text='Do you purchase items from this company?', verbose_name='is supplier'),
            field=models.BooleanField(default=True, help_text='Do you purchase items from this company?', verbose_name='Is supplier'),
        ),
        migrations.AlterField(
            model_name='company',

@ -21,6 +21,7 @@ class Migration(migrations.Migration):
                ('manufacturer_part', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parameters', to='company.manufacturerpart', verbose_name='Manufacturer Part')),
            ],
            options={
                'verbose_name': 'Manufacturer Part Parameter',
                'unique_together': {('manufacturer_part', 'name')},
            },
        ),

@ -12,7 +12,10 @@ class Migration(migrations.Migration):
    operations = [
        migrations.AlterModelOptions(
            name='address',
            options={'verbose_name_plural': 'Addresses'},
            options={
                'verbose_name': 'Address',
                'verbose_name_plural': 'Addresses'
            },
        ),
        migrations.AlterField(
            model_name='address',

@ -0,0 +1,24 @@
# Generated by Django 4.2.11 on 2024-07-16 12:58

import InvenTree.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('company', '0070_remove_manufacturerpartattachment_manufacturer_part_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='manufacturerpart',
            name='notes',
            field=InvenTree.fields.InvenTreeNotesField(blank=True, help_text='Markdown notes (optional)', max_length=50000, null=True, verbose_name='Notes'),
        ),
        migrations.AddField(
            model_name='supplierpart',
            name='notes',
            field=InvenTree.fields.InvenTreeNotesField(blank=True, help_text='Markdown notes (optional)', max_length=50000, null=True, verbose_name='Notes'),
        ),
    ]
@ -165,19 +165,19 @@ class Company(

    is_customer = models.BooleanField(
        default=False,
        verbose_name=_('is customer'),
        verbose_name=_('Is customer'),
        help_text=_('Do you sell items to this company?'),
    )

    is_supplier = models.BooleanField(
        default=True,
        verbose_name=_('is supplier'),
        verbose_name=_('Is supplier'),
        help_text=_('Do you purchase items from this company?'),
    )

    is_manufacturer = models.BooleanField(
        default=False,
        verbose_name=_('is manufacturer'),
        verbose_name=_('Is manufacturer'),
        help_text=_('Does this company manufacture parts?'),
    )

@ -269,6 +269,11 @@ class Contact(InvenTree.models.InvenTreeMetadataModel):
        role: position in company
    """

    class Meta:
        """Metaclass defines extra model options."""

        verbose_name = _('Contact')

    @staticmethod
    def get_api_url():
        """Return the API URL associated with the Contact model."""

@ -306,7 +311,8 @@ class Address(InvenTree.models.InvenTreeModel):
    class Meta:
        """Metaclass defines extra model options."""

        verbose_name_plural = 'Addresses'
        verbose_name = _('Address')
        verbose_name_plural = _('Addresses')

    def __init__(self, *args, **kwargs):
        """Custom init function."""

@ -445,6 +451,7 @@ class Address(InvenTree.models.InvenTreeModel):
class ManufacturerPart(
    InvenTree.models.InvenTreeAttachmentMixin,
    InvenTree.models.InvenTreeBarcodeMixin,
    InvenTree.models.InvenTreeNotesMixin,
    InvenTree.models.InvenTreeMetadataModel,
):
    """Represents a unique part as provided by a Manufacturer. Each ManufacturerPart is identified by an MPN (Manufacturer Part Number). Each ManufacturerPart is also linked to a Part object. A Part may be available from multiple manufacturers.

@ -560,6 +567,7 @@ class ManufacturerPartParameter(InvenTree.models.InvenTreeModel):
    class Meta:
        """Metaclass defines extra model options."""

        verbose_name = _('Manufacturer Part Parameter')
        unique_together = ('manufacturer_part', 'name')

    @staticmethod

@ -617,6 +625,7 @@ class SupplierPartManager(models.Manager):
class SupplierPart(
    InvenTree.models.MetadataMixin,
    InvenTree.models.InvenTreeBarcodeMixin,
    InvenTree.models.InvenTreeNotesMixin,
    common.models.MetaMixin,
    InvenTree.models.InvenTreeModel,
):

@ -1005,6 +1014,7 @@ class SupplierPriceBreak(common.models.PriceBreak):
    class Meta:
        """Metaclass defines extra model options."""

        verbose_name = _('Supplier Price Break')
        unique_together = ('part', 'quantity')

    # This model was moved from the 'Part' app
@ -10,6 +10,9 @@ from sql_util.utils import SubqueryCount
from taggit.serializers import TagListSerializerField

import part.filters
import part.serializers as part_serializers
from importer.mixins import DataImportExportSerializerMixin
from importer.registry import register_importer
from InvenTree.serializers import (
    InvenTreeCurrencySerializer,
    InvenTreeDecimalField,

@ -20,7 +23,6 @@ from InvenTree.serializers import (
    NotesFieldMixin,
    RemoteImageMixin,
)
from part.serializers import PartBriefSerializer

from .models import (
    Address,

@ -56,7 +58,8 @@ class CompanyBriefSerializer(InvenTreeModelSerializer):
    thumbnail = serializers.CharField(source='get_thumbnail_url', read_only=True)


class AddressSerializer(InvenTreeModelSerializer):
@register_importer()
class AddressSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializer for the Address Model."""

    class Meta:

@ -100,9 +103,19 @@ class AddressBriefSerializer(InvenTreeModelSerializer):
    ]


class CompanySerializer(NotesFieldMixin, RemoteImageMixin, InvenTreeModelSerializer):
@register_importer()
class CompanySerializer(
    DataImportExportSerializerMixin,
    NotesFieldMixin,
    RemoteImageMixin,
    InvenTreeModelSerializer,
):
    """Serializer for Company object (full detail)."""

    export_exclude_fields = ['url', 'primary_address']

    import_exclude_fields = ['image']

    class Meta:
        """Metaclass options."""

@ -183,17 +196,25 @@ class CompanySerializer(NotesFieldMixin, RemoteImageMixin, InvenTreeModelSeriali
        return self.instance


class ContactSerializer(InvenTreeModelSerializer):
@register_importer()
class ContactSerializer(DataImportExportSerializerMixin, InvenTreeModelSerializer):
    """Serializer class for the Contact model."""

    class Meta:
        """Metaclass options."""

        model = Contact
        fields = ['pk', 'company', 'name', 'phone', 'email', 'role']
        fields = ['pk', 'company', 'company_name', 'name', 'phone', 'email', 'role']

    company_name = serializers.CharField(
        label=_('Company Name'), source='company.name', read_only=True
    )


class ManufacturerPartSerializer(InvenTreeTagModelSerializer):
@register_importer()
class ManufacturerPartSerializer(
    DataImportExportSerializerMixin, InvenTreeTagModelSerializer, NotesFieldMixin
):
    """Serializer for ManufacturerPart object."""

    class Meta:

@ -211,6 +232,7 @@ class ManufacturerPartSerializer(InvenTreeTagModelSerializer):
            'MPN',
            'link',
            'barcode_hash',
            'notes',
            'tags',
        ]

@ -225,15 +247,17 @@ class ManufacturerPartSerializer(InvenTreeTagModelSerializer):
        super().__init__(*args, **kwargs)

        if part_detail is not True:
            self.fields.pop('part_detail')
            self.fields.pop('part_detail', None)

        if manufacturer_detail is not True:
            self.fields.pop('manufacturer_detail')
            self.fields.pop('manufacturer_detail', None)

        if prettify is not True:
            self.fields.pop('pretty_name')
            self.fields.pop('pretty_name', None)

    part_detail = PartBriefSerializer(source='part', many=False, read_only=True)
    part_detail = part_serializers.PartBriefSerializer(
        source='part', many=False, read_only=True
    )

    manufacturer_detail = CompanyBriefSerializer(
        source='manufacturer', many=False, read_only=True

@ -246,7 +270,10 @@ class ManufacturerPartSerializer(InvenTreeTagModelSerializer):
    )


class ManufacturerPartParameterSerializer(InvenTreeModelSerializer):
@register_importer()
class ManufacturerPartParameterSerializer(
    DataImportExportSerializerMixin, InvenTreeModelSerializer
):
    """Serializer for the ManufacturerPartParameter model."""

    class Meta:

@ -270,14 +297,17 @@ class ManufacturerPartParameterSerializer(InvenTreeModelSerializer):
        super().__init__(*args, **kwargs)

        if not man_detail:
            self.fields.pop('manufacturer_part_detail')
            self.fields.pop('manufacturer_part_detail', None)

    manufacturer_part_detail = ManufacturerPartSerializer(
        source='manufacturer_part', many=False, read_only=True
    )


class SupplierPartSerializer(InvenTreeTagModelSerializer):
@register_importer()
class SupplierPartSerializer(
    DataImportExportSerializerMixin, InvenTreeTagModelSerializer, NotesFieldMixin
):
    """Serializer for SupplierPart object."""

    class Meta:

@ -311,6 +341,7 @@ class SupplierPartSerializer(InvenTreeTagModelSerializer):
            'supplier_detail',
            'url',
            'updated',
            'notes',
            'tags',
        ]

@ -341,17 +372,17 @@ class SupplierPartSerializer(InvenTreeTagModelSerializer):
        super().__init__(*args, **kwargs)

        if part_detail is not True:
            self.fields.pop('part_detail')
            self.fields.pop('part_detail', None)

        if supplier_detail is not True:
            self.fields.pop('supplier_detail')
            self.fields.pop('supplier_detail', None)

        if manufacturer_detail is not True:
            self.fields.pop('manufacturer_detail')
            self.fields.pop('manufacturer_part_detail')
            self.fields.pop('manufacturer_detail', None)
            self.fields.pop('manufacturer_part_detail', None)

        if prettify is not True:
            self.fields.pop('pretty_name')
            self.fields.pop('pretty_name', None)

    # Annotated field showing total in-stock quantity
    in_stock = serializers.FloatField(read_only=True, label=_('In Stock'))

@ -360,7 +391,9 @@ class SupplierPartSerializer(InvenTreeTagModelSerializer):

    pack_quantity_native = serializers.FloatField(read_only=True)

    part_detail = PartBriefSerializer(source='part', many=False, read_only=True)
    part_detail = part_serializers.PartBriefSerializer(
        source='part', many=False, read_only=True
    )

    supplier_detail = CompanyBriefSerializer(
        source='supplier', many=False, read_only=True

@ -435,7 +468,10 @@ class SupplierPartSerializer(InvenTreeTagModelSerializer):
        return supplier_part


class SupplierPriceBreakSerializer(InvenTreeModelSerializer):
@register_importer()
class SupplierPriceBreakSerializer(
    DataImportExportSerializerMixin, InvenTreeModelSerializer
):
    """Serializer for SupplierPriceBreak object."""

    class Meta:

@ -462,10 +498,10 @@ class SupplierPriceBreakSerializer(InvenTreeModelSerializer):
        super().__init__(*args, **kwargs)

        if not supplier_detail:
            self.fields.pop('supplier_detail')
            self.fields.pop('supplier_detail', None)

        if not part_detail:
            self.fields.pop('part_detail')
            self.fields.pop('part_detail', None)

    quantity = InvenTreeDecimalField()
@ -171,11 +171,40 @@ src="{% static 'img/blank_image.png' %}"
    </div>
</div>

<div class='panel panel-hidden' id='panel-manufacturer-part-notes'>
    <div class='panel-heading'>
        <div class='d-flex flex-wrap'>
            <h4>{% trans "Manufacturer Part Notes" %}</h4>
            {% include "spacer.html" %}
            <div class='btn-group' role='group'>
                {% include "notes_buttons.html" %}
            </div>
        </div>
    </div>
    <div class='panel-content'>
        <textarea id='manufacturer-part-notes'></textarea>
    </div>
</div>

{% endblock page_content %}

{% block js_ready %}
{{ block.super }}

// Load the "notes" tab
onPanelLoad('manufacturer-part-notes', function() {

    setupNotesField(
        'manufacturer-part-notes',
        '{% url "api-manufacturer-part-detail" part.pk %}',
        {
            model_type: "manufacturerpart",
            model_id: {{ part.pk }},
            editable: {% js_bool roles.purchase_order.change %},
        }
    );
});

onPanelLoad("attachments", function() {
    loadAttachmentTable('manufacturerpart', {{ part.pk }});
});

@ -8,3 +8,5 @@
{% include "sidebar_item.html" with label='supplier-parts' text=text icon="fa-building" %}
{% trans "Attachments" as text %}
{% include "sidebar_item.html" with label='attachments' text=text icon="fa-paperclip" %}
{% trans "Notes" as text %}
{% include "sidebar_item.html" with label="manufacturer-part-notes" text=text icon="fa-clipboard" %}
@ -264,11 +264,40 @@ src="{% static 'img/blank_image.png' %}"
    </div>
</div>

<div class='panel panel-hidden' id='panel-supplier-part-notes'>
    <div class='panel-heading'>
        <div class='d-flex flex-wrap'>
            <h4>{% trans "Supplier Part Notes" %}</h4>
            {% include "spacer.html" %}
            <div class='btn-group' role='group'>
                {% include "notes_buttons.html" %}
            </div>
        </div>
    </div>
    <div class='panel-content'>
        <textarea id='supplier-part-notes'></textarea>
    </div>
</div>

{% endblock page_content %}

{% block js_ready %}
{{ block.super }}

// Load the "notes" tab
onPanelLoad('supplier-part-notes', function() {

    setupNotesField(
        'supplier-part-notes',
        '{% url "api-supplier-part-detail" part.pk %}',
        {
            model_type: "supplierpart",
            model_id: {{ part.pk }},
            editable: {% js_bool roles.purchase_order.change %},
        }
    );
});

{% if barcodes %}

$("#show-qr-code").click(function() {

@ -8,3 +8,5 @@
{% include "sidebar_item.html" with label='purchase-orders' text=text icon="fa-shopping-cart" %}
{% trans "Supplier Part Pricing" as text %}
{% include "sidebar_item.html" with label='pricing' text=text icon="fa-dollar-sign" %}
{% trans "Notes" as text %}
{% include "sidebar_item.html" with label="supplier-part-notes" text=text icon="fa-clipboard" %}
@ -57,22 +57,20 @@ class CompanyTest(InvenTreeAPITestCase):
    def test_company_detail(self):
        """Tests for the Company detail endpoint."""
        url = reverse('api-company-detail', kwargs={'pk': self.acme.pk})
        response = self.get(url)
        response = self.get(url, expected_code=200)

        self.assertIn('name', response.data.keys())
        self.assertEqual(response.data['name'], 'ACME')

        # Change the name of the company
        # Note we should not have the correct permissions (yet)
        data = response.data
        response = self.client.patch(url, data, format='json', expected_code=400)

        self.assignRole('company.change')

        # Update the name and set the currency to a valid value
        data['name'] = 'ACMOO'
        data['currency'] = 'NZD'

        response = self.client.patch(url, data, format='json', expected_code=200)
        response = self.patch(url, data, expected_code=200)

        self.assertEqual(response.data['name'], 'ACMOO')
        self.assertEqual(response.data['currency'], 'NZD')
0
src/backend/InvenTree/importer/__init__.py
Normal file
80
src/backend/InvenTree/importer/admin.py
Normal file
@ -0,0 +1,80 @@
"""Admin site specification for the 'importer' app."""

from django.contrib import admin
from django.urls import path

import importer.models
import importer.registry


class DataImportColumnMapAdmin(admin.TabularInline):
    """Inline admin for DataImportColumnMap model."""

    model = importer.models.DataImportColumnMap
    can_delete = False
    max_num = 0

    def get_readonly_fields(self, request, obj=None):
        """Return the readonly fields for the admin interface."""
        return ['field']

    def formfield_for_dbfield(self, db_field, request, **kwargs):
        """Override the choices for the column field."""
        if db_field.name == 'column':
            # TODO: Implement this!
            queryset = self.get_queryset(request)

            if queryset.count() > 0:
                session = queryset.first().session
                db_field.choices = [(col, col) for col in session.columns]

        return super().formfield_for_choice_field(db_field, request, **kwargs)


@admin.register(importer.models.DataImportSession)
class DataImportSessionAdmin(admin.ModelAdmin):
    """Admin interface for the DataImportSession model."""

    list_display = ['id', 'data_file', 'status', 'user']

    list_filter = ['status']

    inlines = [DataImportColumnMapAdmin]

    def get_readonly_fields(self, request, obj=None):
        """Update the readonly fields for the admin interface."""
        fields = ['columns', 'status', 'timestamp']

        # Prevent data file from being edited after upload!
        if obj:
            fields += ['data_file']
        else:
            fields += ['field_mapping']

        return fields

    def formfield_for_dbfield(self, db_field, request, **kwargs):
        """Override the choices for the model_type field."""
        if db_field.name == 'model_type':
            db_field.choices = importer.registry.supported_model_options()

        return super().formfield_for_dbfield(db_field, request, **kwargs)


@admin.register(importer.models.DataImportRow)
class DataImportRowAdmin(admin.ModelAdmin):
    """Admin interface for the DataImportRow model."""

    list_display = ['id', 'session', 'row_index']

    def get_readonly_fields(self, request, obj=None):
        """Return the readonly fields for the admin interface."""
        return ['session', 'row_index', 'row_data', 'errors', 'valid']


class DataExportAdmin(admin.ModelAdmin):
    """Custom admin class mixin allowing for data export functionality."""

    serializer_class = None

    # TODO: Add custom admin action to export queryset data
200
src/backend/InvenTree/importer/api.py
Normal file
@ -0,0 +1,200 @@
"""API endpoints for the importer app."""

from django.shortcuts import get_object_or_404
from django.urls import include, path

from drf_spectacular.utils import extend_schema
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.views import APIView

import importer.models
import importer.registry
import importer.serializers
from InvenTree.api import BulkDeleteMixin
from InvenTree.filters import SEARCH_ORDER_FILTER
from InvenTree.mixins import (
    CreateAPI,
    ListAPI,
    ListCreateAPI,
    RetrieveUpdateAPI,
    RetrieveUpdateDestroyAPI,
)


class DataImporterModelList(APIView):
    """API endpoint for displaying a list of models available for import."""

    permission_classes = [permissions.IsAuthenticated]

    def get(self, request):
        """Return a list of models available for import."""
        models = []

        for serializer in importer.registry.get_supported_serializers():
            model = serializer.Meta.model
            url = model.get_api_url() if hasattr(model, 'get_api_url') else None

            models.append({
                'serializer': str(serializer.__name__),
                'model_type': model.__name__.lower(),
                'api_url': url,
            })

        return Response(models)


class DataImportSessionList(BulkDeleteMixin, ListCreateAPI):
    """API endpoint for accessing a list of DataImportSession objects."""

    queryset = importer.models.DataImportSession.objects.all()
    serializer_class = importer.serializers.DataImportSessionSerializer

    filter_backends = SEARCH_ORDER_FILTER

    filterset_fields = ['model_type', 'status', 'user']

    ordering_fields = ['timestamp', 'status', 'model_type']


class DataImportSessionDetail(RetrieveUpdateDestroyAPI):
    """Detail endpoint for a single DataImportSession object."""

    queryset = importer.models.DataImportSession.objects.all()
    serializer_class = importer.serializers.DataImportSessionSerializer


class DataImportSessionAcceptFields(APIView):
    """API endpoint to accept the field mapping for a DataImportSession."""

    permission_classes = [permissions.IsAuthenticated]

    @extend_schema(
        responses={200: importer.serializers.DataImportSessionSerializer(many=False)}
    )
    def post(self, request, pk):
        """Accept the field mapping for a DataImportSession."""
        session = get_object_or_404(importer.models.DataImportSession, pk=pk)

        # Attempt to accept the mapping (may raise an exception if the mapping is invalid)
        session.accept_mapping()

        return Response(importer.serializers.DataImportSessionSerializer(session).data)


class DataImportSessionAcceptRows(CreateAPI):
    """API endpoint to accept the rows for a DataImportSession."""

    queryset = importer.models.DataImportSession.objects.all()
    serializer_class = importer.serializers.DataImportAcceptRowSerializer

    def get_serializer_context(self):
        """Add the import session object to the serializer context."""
        ctx = super().get_serializer_context()

        try:
            ctx['session'] = importer.models.DataImportSession.objects.get(
                pk=self.kwargs.get('pk', None)
            )
        except Exception:
            pass

        ctx['request'] = self.request
        return ctx


class DataImportColumnMappingList(ListAPI):
    """API endpoint for accessing a list of DataImportColumnMap objects."""

    queryset = importer.models.DataImportColumnMap.objects.all()
    serializer_class = importer.serializers.DataImportColumnMapSerializer

    filter_backends = SEARCH_ORDER_FILTER

    filterset_fields = ['session']


class DataImportColumnMappingDetail(RetrieveUpdateAPI):
    """Detail endpoint for a single DataImportColumnMap object."""

    queryset = importer.models.DataImportColumnMap.objects.all()
    serializer_class = importer.serializers.DataImportColumnMapSerializer


class DataImportRowList(BulkDeleteMixin, ListAPI):
    """API endpoint for accessing a list of DataImportRow objects."""

    queryset = importer.models.DataImportRow.objects.all()
    serializer_class = importer.serializers.DataImportRowSerializer

    filter_backends = SEARCH_ORDER_FILTER

    filterset_fields = ['session', 'valid', 'complete']

    ordering_fields = ['pk', 'row_index', 'valid']

    ordering = 'row_index'


class DataImportRowDetail(RetrieveUpdateDestroyAPI):
    """Detail endpoint for a single DataImportRow object."""

    queryset = importer.models.DataImportRow.objects.all()
    serializer_class = importer.serializers.DataImportRowSerializer


importer_api_urls = [
    path('models/', DataImporterModelList.as_view(), name='api-importer-model-list'),
    path(
        'session/',
        include([
            path(
                '<int:pk>/',
                include([
                    path(
                        'accept_fields/',
                        DataImportSessionAcceptFields.as_view(),
                        name='api-import-session-accept-fields',
                    ),
                    path(
                        'accept_rows/',
                        DataImportSessionAcceptRows.as_view(),
                        name='api-import-session-accept-rows',
                    ),
                    path(
                        '',
                        DataImportSessionDetail.as_view(),
                        name='api-import-session-detail',
                    ),
                ]),
            ),
            path('', DataImportSessionList.as_view(), name='api-importer-session-list'),
        ]),
    ),
    path(
        'column-mapping/',
        include([
            path(
                '<int:pk>/',
                DataImportColumnMappingDetail.as_view(),
                name='api-importer-mapping-detail',
            ),
            path(
                '',
                DataImportColumnMappingList.as_view(),
                name='api-importer-mapping-list',
            ),
        ]),
    ),
    path(
        'row/',
        include([
            path(
                '<int:pk>/',
                DataImportRowDetail.as_view(),
                name='api-importer-row-detail',
            ),
            path('', DataImportRowList.as_view(), name='api-importer-row-list'),
        ]),
    ),
]
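Assuming the importer_api_urls group is mounted under an api/importer/ prefix (the mount point itself is not part of this diff), the nested patterns above resolve to endpoints along these lines:

# Hypothetical resolved routes, for orientation only:
#   /api/importer/models/                      -> DataImporterModelList
#   /api/importer/session/                     -> DataImportSessionList
#   /api/importer/session/<pk>/                -> DataImportSessionDetail
#   /api/importer/session/<pk>/accept_fields/  -> DataImportSessionAcceptFields
#   /api/importer/session/<pk>/accept_rows/    -> DataImportSessionAcceptRows
#   /api/importer/column-mapping/              -> DataImportColumnMappingList
#   /api/importer/row/                         -> DataImportRowList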
10
src/backend/InvenTree/importer/apps.py
Normal file
@ -0,0 +1,10 @@
"""AppConfig for the 'importer' app."""

from django.apps import AppConfig


class ImporterConfig(AppConfig):
    """AppConfig class for the 'importer' app."""

    default_auto_field = 'django.db.models.BigAutoField'
    name = 'importer'
56
src/backend/InvenTree/importer/migrations/0001_initial.py
Normal file
@ -0,0 +1,56 @@
# Generated by Django 4.2.12 on 2024-06-30 04:42

from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import importer.validators
import InvenTree.helpers
from importer.status_codes import DataImportStatusCode


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='DataImportSession',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='Timestamp')),
                ('data_file', models.FileField(help_text='Data file to import', upload_to='import', validators=[django.core.validators.FileExtensionValidator(allowed_extensions=InvenTree.helpers.GetExportFormats()), importer.validators.validate_data_file], verbose_name='Data File')),
                ('columns', models.JSONField(blank=True, null=True, verbose_name='Columns')),
                ('model_type', models.CharField(max_length=100, validators=[importer.validators.validate_importer_model_type])),
                ('status', models.PositiveIntegerField(choices=DataImportStatusCode.items(), default=DataImportStatusCode.INITIAL.value, help_text='Import status')),
                ('field_defaults', models.JSONField(blank=True, null=True, validators=[importer.validators.validate_field_defaults], verbose_name='Field Defaults')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='User')),
            ],
        ),
        migrations.CreateModel(
            name='DataImportRow',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('row_index', models.PositiveIntegerField(default=0, verbose_name='Row Index')),
                ('row_data', models.JSONField(blank=True, null=True, verbose_name='Original row data')),
                ('data', models.JSONField(blank=True, null=True, verbose_name='Data')),
                ('errors', models.JSONField(blank=True, null=True, verbose_name='Errors')),
                ('valid', models.BooleanField(default=False, verbose_name='Valid')),
                ('complete', models.BooleanField(default=False, verbose_name='Complete')),
                ('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rows', to='importer.dataimportsession', verbose_name='Import Session')),
            ],
        ),
        migrations.CreateModel(
            name='DataImportColumnMap',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('field', models.CharField(max_length=100, verbose_name='Field')),
                ('column', models.CharField(blank=True, max_length=100, verbose_name='Column')),
                ('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='column_mappings', to='importer.dataimportsession', verbose_name='Import Session')),
            ],
        ),
    ]
@ -0,0 +1,19 @@
# Generated by Django 4.2.14 on 2024-07-12 03:35

from django.db import migrations, models
import importer.validators


class Migration(migrations.Migration):

    dependencies = [
        ('importer', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='dataimportsession',
            name='field_overrides',
            field=models.JSONField(blank=True, null=True, validators=[importer.validators.validate_field_defaults], verbose_name='Field Overrides'),
        ),
    ]
@ -0,0 +1,19 @@
# Generated by Django 4.2.14 on 2024-07-16 03:04

from django.db import migrations, models
import importer.validators


class Migration(migrations.Migration):

    dependencies = [
        ('importer', '0002_dataimportsession_field_overrides'),
    ]

    operations = [
        migrations.AddField(
            model_name='dataimportsession',
            name='field_filters',
            field=models.JSONField(blank=True, null=True, validators=[importer.validators.validate_field_defaults], verbose_name='Field Filters'),
        ),
    ]
267
src/backend/InvenTree/importer/mixins.py
Normal file
@ -0,0 +1,267 @@
"""Mixin classes for data import/export functionality."""

from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

import tablib
from rest_framework import fields, serializers

import importer.operations
from InvenTree.helpers import DownloadFile, GetExportFormats, current_date


class DataImportSerializerMixin:
    """Mixin class for adding data import functionality to a DRF serializer."""

    import_only_fields = []
    import_exclude_fields = []

    def get_import_only_fields(self, **kwargs) -> list:
        """Return the list of field names which are only used during data import."""
        return self.import_only_fields

    def get_import_exclude_fields(self, **kwargs) -> list:
        """Return the list of field names which are excluded during data import."""
        return self.import_exclude_fields

    def __init__(self, *args, **kwargs):
        """Initialise the DataImportSerializerMixin.

        Determine if the serializer is being used for data import,
        and if so, adjust the serializer fields accordingly.
        """
        importing = kwargs.pop('importing', False)

        super().__init__(*args, **kwargs)

        if importing:
            # Exclude any fields which are not able to be imported
            importable_field_names = list(self.get_importable_fields().keys())
            field_names = list(self.fields.keys())

            for field in field_names:
                if field not in importable_field_names:
                    self.fields.pop(field, None)

            # Exclude fields which are excluded for data import
            for field in self.get_import_exclude_fields(**kwargs):
                self.fields.pop(field, None)

        else:
            # Exclude fields which are only used for data import
            for field in self.get_import_only_fields(**kwargs):
                self.fields.pop(field, None)

    def get_importable_fields(self) -> dict:
        """Return a dict of fields which can be imported against this serializer instance.

        Returns:
            dict: A dictionary of field names and field objects
        """
        importable_fields = {}

        if meta := getattr(self, 'Meta', None):
            read_only_fields = getattr(meta, 'read_only_fields', [])
        else:
            read_only_fields = []

        for name, field in self.fields.items():
            # Skip read-only fields
            if getattr(field, 'read_only', False):
                continue

            if name in read_only_fields:
                continue

            # Skip fields which are themselves serializers
            if issubclass(field.__class__, serializers.Serializer):
                continue

            # Skip file fields
            if issubclass(field.__class__, fields.FileField):
                continue

            importable_fields[name] = field

        return importable_fields
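The importing flag is the only hook a caller needs; which fields survive is derived entirely from the serializer's own declarations. A minimal sketch using the ProjectCodeSerializer registered earlier in this diff (behaviour inferred from the mixin code above; assumes a configured Django environment):

from common.serializers import ProjectCodeSerializer

# Normal construction: all declared fields present
full = ProjectCodeSerializer()

# Import mode: read-only fields, nested serializers, file fields and any
# import_exclude_fields are stripped, leaving only writable flat fields
importing = ProjectCodeSerializer(importing=True)

assert set(importing.fields) <= set(full.fields)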
class DataExportSerializerMixin:
    """Mixin class for adding data export functionality to a DRF serializer."""

    export_only_fields = []
    export_exclude_fields = []

    def get_export_only_fields(self, **kwargs) -> list:
        """Return the list of field names which are only used during data export."""
        return self.export_only_fields

    def get_export_exclude_fields(self, **kwargs) -> list:
        """Return the list of field names which are excluded during data export."""
        return self.export_exclude_fields

    def __init__(self, *args, **kwargs):
        """Initialise the DataExportSerializerMixin.

        Determine if the serializer is being used for data export,
        and if so, adjust the serializer fields accordingly.
        """
        exporting = kwargs.pop('exporting', False)

        super().__init__(*args, **kwargs)

        if exporting:
            # Exclude fields which are not required for data export
            for field in self.get_export_exclude_fields(**kwargs):
                self.fields.pop(field, None)
        else:
            # Exclude fields which are only used for data export
            for field in self.get_export_only_fields(**kwargs):
                self.fields.pop(field, None)

    def get_exportable_fields(self) -> dict:
        """Return a dict of fields which can be exported against this serializer instance.

        Note: Any fields which should be excluded from export have already been removed

        Returns:
            dict: A dictionary of field names and field objects
        """
        fields = {}

        if meta := getattr(self, 'Meta', None):
            write_only_fields = getattr(meta, 'write_only_fields', [])
        else:
            write_only_fields = []

        for name, field in self.fields.items():
            # Skip write-only fields
            if getattr(field, 'write_only', False):
                continue

            if name in write_only_fields:
                continue

            # Skip fields which are themselves serializers
            if issubclass(field.__class__, serializers.Serializer):
                continue

            fields[name] = field

        return fields

    def get_exported_filename(self, export_format) -> str:
        """Return the filename for the exported data file.

        An implementing class can override this implementation if required.

        Arguments:
            export_format: The file format to be exported

        Returns:
            str: The filename for the exported file
        """
        model = self.Meta.model
        date = current_date().isoformat()

        return f'InvenTree_{model.__name__}_{date}.{export_format}'

    @classmethod
    def arrange_export_headers(cls, headers: list) -> list:
        """Optional method to arrange the export headers."""
        return headers

    def process_row(self, row):
        """Optional method to process a row before exporting it."""
        return row

    def export_to_file(self, data, file_format):
        """Export the serialized dataset to a file in the specified format.

        Arguments:
            data: The serialized dataset to export
            file_format: The file format to export to

        Returns:
            File object containing the exported data
        """
        # Extract all exportable fields from this serializer
        fields = self.get_exportable_fields()

        field_names = self.arrange_export_headers(list(fields.keys()))

        # Extract human-readable field names
        headers = []

        for field_name, field in fields.items():
            headers.append(importer.operations.get_field_label(field) or field_name)

        dataset = tablib.Dataset(headers=headers)

        for row in data:
            row = self.process_row(row)
            dataset.append([row.get(field, None) for field in field_names])

        return dataset.export(file_format)
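Putting the export pieces together: construct the serializer with exporting=True, serialize the queryset separately (many=True returns a ListSerializer, as export_data notes further down), then hand the rows to export_to_file. A condensed sketch using the CompanySerializer updated earlier in this diff (assumes a configured Django environment):

from company.models import Company
from company.serializers import CompanySerializer

queryset = Company.objects.all()

# exporting=True strips export-excluded fields ('url', 'primary_address')
serializer = CompanySerializer(exporting=True)
data = CompanySerializer(queryset, many=True, exporting=True).data

datafile = serializer.export_to_file(data, 'csv')
filename = serializer.get_exported_filename('csv')
# e.g. 'InvenTree_Company_2024-07-16.csv' (date depends on current_date())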
class DataImportExportSerializerMixin(
    DataImportSerializerMixin, DataExportSerializerMixin
):
    """Mixin class for adding data import/export functionality to a DRF serializer."""

    pass


class DataExportViewMixin:
    """Mixin class for exporting a dataset via the API.

    Adding this mixin to an API view allows the user to export the dataset to file in a variety of formats.

    We achieve this by overriding the 'get' method, and checking for the presence of the required query parameter.
    """

    EXPORT_QUERY_PARAMETER = 'export'

    def export_data(self, export_format):
        """Export the data in the specified format.

        Use the provided serializer to generate the data, and return it as a file download.
        """
        serializer_class = self.get_serializer_class()

        if not issubclass(serializer_class, DataExportSerializerMixin):
            raise TypeError(
                'Serializer class must inherit from DataExportSerializerMixin'
            )

        queryset = self.filter_queryset(self.get_queryset())

        serializer = serializer_class(exporting=True)
        serializer.initial_data = queryset

        # Export dataset with a second copy of the serializer
        # This is because when we pass many=True, the returned class is a ListSerializer
        data = serializer_class(queryset, many=True, exporting=True).data

        filename = serializer.get_exported_filename(export_format)
        datafile = serializer.export_to_file(data, export_format)

        return DownloadFile(datafile, filename=filename)

    def get(self, request, *args, **kwargs):
        """Override the 'get' method to check for the export query parameter."""
        if export_format := request.query_params.get(self.EXPORT_QUERY_PARAMETER, None):
            export_format = str(export_format).strip().lower()
            if export_format in GetExportFormats():
                return self.export_data(export_format)
            else:
                raise ValidationError({
                    self.EXPORT_QUERY_PARAMETER: _('Invalid export format')
                })

        # If the export query parameter is not present, return the default response
        return super().get(request, *args, **kwargs)
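From the client's perspective the mixin is invisible until the query parameter is supplied. A hypothetical exchange against the CompanyList endpoint updated earlier in this diff (the exact URL prefix depends on deployment):

# Plain JSON list response:
#   GET /api/company/
# Same endpoint, returned as a CSV file download via DataExportViewMixin:
#   GET /api/company/?export=csv
# Unsupported formats are rejected:
#   GET /api/company/?export=doc  ->  {'export': 'Invalid export format'}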
659
src/backend/InvenTree/importer/models.py
Normal file
@ -0,0 +1,659 @@
"""Model definitions for the 'importer' app."""

import json
import logging

from django.contrib.auth.models import User
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.validators import FileExtensionValidator
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _

from rest_framework.exceptions import ValidationError as DRFValidationError

import importer.operations
import importer.registry
import importer.tasks
import importer.validators
import InvenTree.helpers
from importer.status_codes import DataImportStatusCode

logger = logging.getLogger('inventree')


class DataImportSession(models.Model):
    """Database model representing a data import session.

    An initial file is uploaded, and used to populate the database.

    Fields:
        timestamp: Timestamp for the import session
        data_file: FileField for the data file to import
        status: IntegerField for the status of the import session
        user: ForeignKey to the User who initiated the import
        field_defaults: JSONField for field default values - provides a backup value for a field
        field_overrides: JSONField for field override values - used to force a value for a field
        field_filters: JSONField for field filter values - optional field API filters
    """

    @staticmethod
    def get_api_url():
        """Return the API URL associated with the DataImportSession model."""
        return reverse('api-importer-session-list')

    def save(self, *args, **kwargs):
        """Save the DataImportSession object."""
        initial = self.pk is None

        self.clean()

        super().save(*args, **kwargs)

        if initial:
            # New object - run initial setup
            self.status = DataImportStatusCode.INITIAL.value
            self.progress = 0
            self.extract_columns()

    timestamp = models.DateTimeField(auto_now_add=True, verbose_name=_('Timestamp'))

    data_file = models.FileField(
        upload_to='import',
        verbose_name=_('Data File'),
        help_text=_('Data file to import'),
        validators=[
            FileExtensionValidator(
                allowed_extensions=InvenTree.helpers.GetExportFormats()
            ),
            importer.validators.validate_data_file,
        ],
    )

    columns = models.JSONField(blank=True, null=True, verbose_name=_('Columns'))

    model_type = models.CharField(
        blank=False,
        max_length=100,
        validators=[importer.validators.validate_importer_model_type],
    )

    status = models.PositiveIntegerField(
        default=DataImportStatusCode.INITIAL.value,
        choices=DataImportStatusCode.items(),
        help_text=_('Import status'),
    )

    user = models.ForeignKey(
        User, on_delete=models.SET_NULL, blank=True, null=True, verbose_name=_('User')
    )

    field_defaults = models.JSONField(
        blank=True,
        null=True,
        verbose_name=_('Field Defaults'),
        validators=[importer.validators.validate_field_defaults],
    )

    field_overrides = models.JSONField(
        blank=True,
        null=True,
        verbose_name=_('Field Overrides'),
        validators=[importer.validators.validate_field_defaults],
    )

    field_filters = models.JSONField(
        blank=True,
        null=True,
        verbose_name=_('Field Filters'),
        validators=[importer.validators.validate_field_defaults],
    )

    @property
    def field_mapping(self):
        """Construct a dict of field mappings for this import session.

        Returns: A dict of field: column mappings
        """
        mapping = {}

        for map in self.column_mappings.all():
            mapping[map.field] = map.column

        return mapping
@property
|
||||
def serializer_class(self):
|
||||
"""Return the serializer class for this importer."""
|
||||
from importer.registry import supported_models
|
||||
|
||||
return supported_models().get(self.model_type, None)
|
||||
|
||||
def extract_columns(self):
|
||||
"""Run initial column extraction and mapping.
|
||||
|
||||
This method is called when the import session is first created.
|
||||
|
||||
- Extract column names from the data file
|
||||
- Create a default mapping for each field in the serializer
|
||||
"""
|
||||
# Extract list of column names from the file
|
||||
self.columns = importer.operations.extract_column_names(self.data_file)
|
||||
|
||||
serializer_fields = self.available_fields()
|
||||
|
||||
# Remove any existing mappings
|
||||
self.column_mappings.all().delete()
|
||||
|
||||
column_mappings = []
|
||||
|
||||
matched_columns = set()
|
||||
|
||||
field_overrides = self.field_overrides or {}
|
||||
|
||||
# Create a default mapping for each available field in the database
|
||||
for field, field_def in serializer_fields.items():
|
||||
# If an override value is provided for the field,
|
||||
# skip creating a mapping for this field
|
||||
if field in field_overrides:
|
||||
continue
|
||||
|
||||
# Generate a list of possible column names for this field
|
||||
field_options = [
|
||||
field,
|
||||
field_def.get('label', field),
|
||||
field_def.get('help_text', field),
|
||||
]
|
||||
column_name = ''
|
||||
|
||||
for column in self.columns:
|
||||
# No title provided for the column
|
||||
if not column:
|
||||
continue
|
||||
|
||||
# Ignore if we have already matched this column to a field
|
||||
if column in matched_columns:
|
||||
continue
|
||||
|
||||
# Try direct match
|
||||
if column in field_options:
|
||||
column_name = column
|
||||
break
|
||||
|
||||
# Try lower case match
|
||||
if column.lower() in [f.lower() for f in field_options]:
|
||||
column_name = column
|
||||
break
|
||||
|
||||
column_mappings.append(
|
||||
DataImportColumnMap(session=self, column=column_name, field=field)
|
||||
)
|
||||
|
||||
# Create the column mappings
|
||||
DataImportColumnMap.objects.bulk_create(column_mappings)
|
||||
|
||||
self.status = DataImportStatusCode.MAPPING.value
|
||||
self.save()
|
||||
|
||||
def accept_mapping(self):
|
||||
"""Accept current mapping configuration.
|
||||
|
||||
- Validate that the current column mapping is correct
|
||||
- Trigger the data import process
|
||||
"""
|
||||
# First, we need to ensure that all the *required* columns have been mapped
|
||||
required_fields = self.required_fields()
|
||||
|
||||
field_defaults = self.field_defaults or {}
|
||||
field_overrides = self.field_overrides or {}
|
||||
|
||||
missing_fields = []
|
||||
|
||||
for field in required_fields.keys():
|
||||
# An override value exists
|
||||
if field in field_overrides:
|
||||
continue
|
||||
|
||||
# A default value exists
|
||||
if field in field_defaults and field_defaults[field]:
|
||||
continue
|
||||
|
||||
# The field has been mapped to a data column
|
||||
if mapping := self.column_mappings.filter(field=field).first():
|
||||
if mapping.column:
|
||||
continue
|
||||
|
||||
missing_fields.append(field)
|
||||
|
||||
if len(missing_fields) > 0:
|
||||
raise DjangoValidationError({
|
||||
'error': _('Some required fields have not been mapped'),
|
||||
'fields': missing_fields,
|
||||
})
|
||||
|
||||
# No errors, so trigger the data import process
|
||||
self.trigger_data_import()
|
||||
|
||||
def trigger_data_import(self):
|
||||
"""Trigger the data import process for this session.
|
||||
|
||||
Offloads the task to the background worker process.
|
||||
"""
|
||||
from InvenTree.tasks import offload_task
|
||||
|
||||
# Mark the import task status as "IMPORTING"
|
||||
self.status = DataImportStatusCode.IMPORTING.value
|
||||
self.save()
|
||||
|
||||
offload_task(importer.tasks.import_data, self.pk)
|
||||
|
||||
def import_data(self):
|
||||
"""Perform the data import process for this session."""
|
||||
# Clear any existing data rows
|
||||
self.rows.all().delete()
|
||||
|
||||
df = importer.operations.load_data_file(self.data_file)
|
||||
|
||||
if df is None:
|
||||
# TODO: Log an error message against the import session
|
||||
logger.error('Failed to load data file')
|
||||
return
|
||||
|
||||
headers = df.headers
|
||||
|
||||
imported_rows = []
|
||||
|
||||
field_mapping = self.field_mapping
|
||||
available_fields = self.available_fields()
|
||||
|
||||
# Iterate through each "row" in the data file, and create a new DataImportRow object
|
||||
for idx, row in enumerate(df):
|
||||
row_data = dict(zip(headers, row))
|
||||
|
||||
# Skip completely empty rows
|
||||
if not any(row_data.values()):
|
||||
continue
|
||||
|
||||
row = importer.models.DataImportRow(
|
||||
session=self, row_data=row_data, row_index=idx
|
||||
)
|
||||
|
||||
row.extract_data(
|
||||
field_mapping=field_mapping,
|
||||
available_fields=available_fields,
|
||||
commit=False,
|
||||
)
|
||||
|
||||
row.valid = row.validate(commit=False)
|
||||
imported_rows.append(row)
|
||||
|
||||
# Perform database writes as a single operation
|
||||
importer.models.DataImportRow.objects.bulk_create(imported_rows)
|
||||
|
||||
# Mark the import task as "PROCESSING"
|
||||
self.status = DataImportStatusCode.PROCESSING.value
|
||||
self.save()
|
||||
|
||||
def check_complete(self) -> bool:
|
||||
"""Check if the import session is complete."""
|
||||
if self.completed_row_count < self.row_count:
|
||||
return False
|
||||
|
||||
# Update the status of this session
|
||||
if self.status != DataImportStatusCode.COMPLETE.value:
|
||||
self.status = DataImportStatusCode.COMPLETE.value
|
||||
self.save()
|
||||
|
||||
return True
|
||||
|
||||
@property
|
||||
def row_count(self):
|
||||
"""Return the number of rows in the import session."""
|
||||
return self.rows.count()
|
||||
|
||||
@property
|
||||
def completed_row_count(self):
|
||||
"""Return the number of completed rows for this session."""
|
||||
return self.rows.filter(complete=True).count()
|
||||
|
||||
def available_fields(self):
|
||||
"""Returns information on the available fields.
|
||||
|
||||
- This method is designed to be introspected by the frontend, for rendering the various fields.
|
||||
- We make use of the InvenTree.metadata module to provide extra information about the fields.
|
||||
|
||||
Note that we cache these fields, as they are expensive to compute.
|
||||
"""
|
||||
if fields := getattr(self, '_available_fields', None):
|
||||
return fields
|
||||
|
||||
from InvenTree.metadata import InvenTreeMetadata
|
||||
|
||||
metadata = InvenTreeMetadata()
|
||||
|
||||
if serializer_class := self.serializer_class:
|
||||
serializer = serializer_class(data={}, importing=True)
|
||||
fields = metadata.get_serializer_info(serializer)
|
||||
else:
|
||||
fields = {}
|
||||
|
||||
self._available_fields = fields
|
||||
return fields
|
||||
|
||||
def required_fields(self):
|
||||
"""Returns information on which fields are *required* for import."""
|
||||
fields = self.available_fields()
|
||||
|
||||
required = {}
|
||||
|
||||
for field, info in fields.items():
|
||||
if info.get('required', False):
|
||||
required[field] = info
|
||||
|
||||
return required
|
||||
|
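
A minimal lifecycle sketch for the session model above (the 'company' model type and file contents are illustrative; it assumes a serializer is registered for that model type):

# Illustrative only - mirrors the flow exercised by the unit tests further below
from django.core.files.base import ContentFile

session = DataImportSession.objects.create(
    data_file=ContentFile('name,description\nACME,Widgets\n', 'companies.csv'),
    model_type='company',
)

# save() on a new session runs extract_columns() automatically
print(session.columns)        # e.g. ['name', 'description']
print(session.field_mapping)  # e.g. {'name': 'name', 'description': 'description'}

session.accept_mapping()      # validates the mapping, then offloads the import task
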

class DataImportColumnMap(models.Model):
    """Database model representing a mapping between a file column and serializer field.

    - Each row maps a "column" (in the import file) to a "field" (in the serializer)
    - Column must exist in the file
    - Field must exist in the serializer (and not be read-only)
    """

    @staticmethod
    def get_api_url():
        """Return the API URL associated with the DataImportColumnMap model."""
        return reverse('api-importer-mapping-list')

    def save(self, *args, **kwargs):
        """Save the DataImportColumnMap object."""
        self.clean()
        self.validate_unique()

        super().save(*args, **kwargs)

    def validate_unique(self, exclude=None):
        """Ensure that the column mapping is unique within the session."""
        super().validate_unique(exclude)

        columns = self.session.column_mappings.exclude(pk=self.pk)

        if (
            self.column not in ['', None]
            and columns.filter(column=self.column).exists()
        ):
            raise DjangoValidationError({
                'column': _('Column is already mapped to a database field')
            })

        if columns.filter(field=self.field).exists():
            raise DjangoValidationError({
                'field': _('Field is already mapped to a data column')
            })

    def clean(self):
        """Validate the column mapping."""
        super().clean()

        if not self.session:
            raise DjangoValidationError({
                'session': _('Column mapping must be linked to a valid import session')
            })

        if self.column and self.column not in self.session.columns:
            raise DjangoValidationError({
                'column': _('Column does not exist in the data file')
            })

        field_def = self.field_definition

        if not field_def:
            raise DjangoValidationError({
                'field': _('Field does not exist in the target model')
            })

        if field_def.get('read_only', False):
            raise DjangoValidationError({'field': _('Selected field is read-only')})

    session = models.ForeignKey(
        DataImportSession,
        on_delete=models.CASCADE,
        verbose_name=_('Import Session'),
        related_name='column_mappings',
    )

    field = models.CharField(max_length=100, verbose_name=_('Field'))

    column = models.CharField(blank=True, max_length=100, verbose_name=_('Column'))

    @property
    def available_fields(self):
        """Return a list of available fields for this import session.

        These fields get cached, as they are expensive to compute.
        """
        if fields := getattr(self, '_available_fields', None):
            return fields

        self._available_fields = self.session.available_fields()

        return self._available_fields

    @property
    def field_definition(self):
        """Return the field definition associated with this column mapping."""
        fields = self.available_fields
        return fields.get(self.field, None)

    @property
    def label(self):
        """Extract the 'label' associated with the mapped field."""
        if field_def := self.field_definition:
            return field_def.get('label', None)

    @property
    def description(self):
        """Extract the 'description' associated with the mapped field."""
        description = None

        if field_def := self.field_definition:
            description = field_def.get('help_text', None)

        if not description:
            description = self.label

        return description


class DataImportRow(models.Model):
    """Database model representing a single row in a data import session.

    Each row corresponds to a single row in the import file, and is used to populate the database.

    Fields:
        session: ForeignKey to the parent DataImportSession object
        data: JSONField for the data in this row
        status: IntegerField for the status of the row import
    """

    @staticmethod
    def get_api_url():
        """Return the API URL associated with the DataImportRow model."""
        return reverse('api-importer-row-list')

    def save(self, *args, **kwargs):
        """Save the DataImportRow object."""
        self.valid = self.validate()
        super().save(*args, **kwargs)

    session = models.ForeignKey(
        DataImportSession,
        on_delete=models.CASCADE,
        verbose_name=_('Import Session'),
        related_name='rows',
    )

    row_index = models.PositiveIntegerField(default=0, verbose_name=_('Row Index'))

    row_data = models.JSONField(
        blank=True, null=True, verbose_name=_('Original row data')
    )

    data = models.JSONField(blank=True, null=True, verbose_name=_('Data'))

    errors = models.JSONField(blank=True, null=True, verbose_name=_('Errors'))

    valid = models.BooleanField(default=False, verbose_name=_('Valid'))

    complete = models.BooleanField(default=False, verbose_name=_('Complete'))

    @property
    def default_values(self) -> dict:
        """Return a dict object of the 'default' values for this row."""
        defaults = self.session.field_defaults or {}

        if type(defaults) is not dict:
            try:
                defaults = json.loads(str(defaults))
            except json.JSONDecodeError:
                logger.warning('Failed to parse default values for import row')
                defaults = {}

        return defaults

    @property
    def override_values(self) -> dict:
        """Return a dict object of the 'override' values for this row."""
        overrides = self.session.field_overrides or {}

        if type(overrides) is not dict:
            try:
                overrides = json.loads(str(overrides))
            except json.JSONDecodeError:
                logger.warning('Failed to parse override values for import row')
                overrides = {}

        return overrides

    def extract_data(
        self, available_fields: dict = None, field_mapping: dict = None, commit=True
    ):
        """Extract row data from the provided data dictionary."""
        if not field_mapping:
            field_mapping = self.session.field_mapping

        if not available_fields:
            available_fields = self.session.available_fields()

        override_values = self.override_values
        default_values = self.default_values

        data = {}

        # We have mapped column (file) to field (serializer) already
        for field, col in field_mapping.items():
            # Data override (force value and skip any further checks)
            if field in override_values:
                data[field] = override_values[field]
                continue

            # Default value (if provided)
            if field in default_values:
                data[field] = default_values[field]

            # If this field is *not* mapped to any column, skip
            if not col or col not in self.row_data:
                continue

            # Extract field type
            field_def = available_fields.get(field, {})

            field_type = field_def.get('type', None)

            value = self.row_data.get(col, None)

            if field_type == 'boolean':
                value = InvenTree.helpers.str2bool(value)
            elif field_type == 'date':
                value = value or None

            # Use the default value, if provided
            if value in [None, ''] and field in default_values:
                value = default_values[field]

            data[field] = value

        self.data = data

        if commit:
            self.save()

    def serializer_data(self):
        """Construct data object to be sent to the serializer.

        - If available, we use the "default" values provided by the import session
        - If available, we use the "override" values provided by the import session
        """
        # Copy the defaults, so we do not mutate the session's own dict
        data = dict(self.default_values)

        if self.data:
            data.update(self.data)

        # Override values take priority, if present
        data.update(self.override_values)

        return data

    def construct_serializer(self):
        """Construct a serializer object for this row."""
        if serializer_class := self.session.serializer_class:
            return serializer_class(data=self.serializer_data())

    def validate(self, commit=False) -> bool:
        """Validate the data in this row against the linked serializer.

        Arguments:
            commit: If True, the data is saved to the database (if validation passes)

        Returns:
            True if the data is valid, False otherwise

        Raises:
            ValidationError: If the linked serializer is not valid
        """
        if self.complete:
            # Row has already been completed
            return True

        serializer = self.construct_serializer()

        if not serializer:
            self.errors = {
                'non_field_errors': 'No serializer class linked to this import session'
            }
            return False

        result = False

        try:
            result = serializer.is_valid(raise_exception=True)
        except (DjangoValidationError, DRFValidationError) as e:
            # DRF exceptions expose '.detail'; Django exceptions expose '.message_dict'
            self.errors = getattr(e, 'detail', None) or getattr(e, 'message_dict', str(e))

        if result:
            self.errors = None

            if commit:
                try:
                    serializer.save()
                    self.complete = True
                    self.save()

                    self.session.check_complete()

                except Exception as e:
                    self.errors = {'non_field_errors': str(e)}
                    result = False

        return result
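
The value precedence implemented by extract_data() and serializer_data() above is: field_overrides always win, then the mapped cell value, with field_defaults as a fallback for empty cells. A stand-alone sketch of the same merge order (inputs are illustrative, not InvenTree code):

defaults = {'currency': 'USD', 'active': True}
extracted = {'name': 'ACME', 'currency': ''}   # the 'currency' cell was empty
overrides = {'active': False}

data = dict(defaults)                                                      # 1. start with defaults
data.update({k: v for k, v in extracted.items() if v not in [None, '']})  # 2. non-empty cells
data.update(overrides)                                                    # 3. overrides always win

print(data)  # {'currency': 'USD', 'active': False, 'name': 'ACME'}
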
122
src/backend/InvenTree/importer/operations.py
Normal file
@ -0,0 +1,122 @@
"""Data import operational functions."""

from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

import tablib

import InvenTree.helpers


def load_data_file(data_file, file_format=None):
    """Load data file into a tablib dataset.

    Arguments:
        data_file: django file object containing data to import (should be already opened!)
        file_format: Format specifier for the data file
    """
    # Introspect the file format based on the provided file
    if not file_format:
        file_format = data_file.name.split('.')[-1]

    if file_format and file_format.startswith('.'):
        file_format = file_format[1:]

    file_format = file_format.strip().lower()

    if file_format not in InvenTree.helpers.GetExportFormats():
        raise ValidationError(_('Unsupported data file format'))

    file_object = data_file.file

    if hasattr(file_object, 'open'):
        file_object.open('r')

    file_object.seek(0)

    try:
        data = file_object.read()
    except (IOError, FileNotFoundError):
        raise ValidationError(_('Failed to open data file'))

    # Excel formats expect binary data
    if file_format not in ['xls', 'xlsx']:
        data = data.decode()

    try:
        data = tablib.Dataset().load(data, headers=True, format=file_format)
    except tablib.core.UnsupportedFormat:
        raise ValidationError(_('Unsupported data file format'))
    except tablib.core.InvalidDimensions:
        raise ValidationError(_('Invalid data file dimensions'))

    return data


def extract_column_names(data_file) -> list:
    """Extract column names from a data file.

    Uses the tablib library to extract column names from a data file.

    Args:
        data_file: File object containing data to import

    Returns:
        List of column names extracted from the file

    Raises:
        ValidationError: If the data file is not in a valid format
    """
    data = load_data_file(data_file)

    headers = []

    for idx, header in enumerate(data.headers):
        if header:
            headers.append(header)
        else:
            # If the header is empty, generate a default header
            headers.append(f'Column {idx + 1}')

    return headers


def extract_rows(data_file) -> list:
    """Extract rows from the data file.

    Each returned row is a dictionary of column_name: value pairs.
    """
    data = load_data_file(data_file)

    headers = data.headers

    rows = []

    for row in data:
        rows.append(dict(zip(headers, row)))

    return rows


def get_field_label(field) -> str:
    """Return the label for a field in a serializer class.

    Check for labels in the following order of descending priority:

    - The serializer class has a 'label' specified for the field
    - The underlying model has a 'verbose_name' specified
    - The field name is used as the label

    Arguments:
        field: Field instance from a serializer class

    Returns:
        str: Field label
    """
    if field:
        if label := getattr(field, 'label', None):
            return label

        # TODO: Check if the field is a model field

    return None
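
A minimal sketch of these helpers in use, assuming a small in-memory CSV (ContentFile supplies the django file object that load_data_file expects; bytes content is used so the decode step applies):

from django.core.files.base import ContentFile

csv_file = ContentFile(b'name,description\nACME,Widgets\n', name='demo.csv')

dataset = load_data_file(csv_file)   # returns a tablib.Dataset
print(dataset.headers)               # ['name', 'description']
print(extract_rows(csv_file))        # [{'name': 'ACME', 'description': 'Widgets'}]
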
72
src/backend/InvenTree/importer/registry.py
Normal file
@ -0,0 +1,72 @@
"""Registry for supported serializers for data import operations."""

import logging

from rest_framework.serializers import Serializer

from importer.mixins import DataImportSerializerMixin

logger = logging.getLogger('inventree')


class DataImportSerializerRegister:
    """Registry for supported serializers for data import operations.

    To add a new serializer to the registry, add the @register_importer decorator to the serializer class.
    """

    supported_serializers: list[Serializer] = []

    def register(self, serializer) -> None:
        """Register a new serializer with the importer registry."""
        if not issubclass(serializer, DataImportSerializerMixin):
            logger.debug('Invalid serializer class: %s', serializer)
            return

        if not issubclass(serializer, Serializer):
            logger.debug('Invalid serializer class: %s', serializer)
            return

        logger.debug('Registering serializer class for import: %s', serializer)

        if serializer not in self.supported_serializers:
            self.supported_serializers.append(serializer)


_serializer_registry = DataImportSerializerRegister()


def get_supported_serializers():
    """Return a list of supported serializers which can be used for importing data."""
    return _serializer_registry.supported_serializers


def supported_models():
    """Return a map of supported models to their respective serializers."""
    data = {}

    for serializer in get_supported_serializers():
        model = serializer.Meta.model
        data[model.__name__.lower()] = serializer

    return data


def supported_model_options():
    """Return a list of supported model options for importing data."""
    options = []

    for model_name, serializer in supported_models().items():
        options.append((model_name, serializer.Meta.model._meta.verbose_name))

    return options


def register_importer():
    """Decorator function to register a serializer with the importer registry."""

    def _decorator(cls):
        _serializer_registry.register(cls)
        return cls

    return _decorator
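
A minimal sketch of registering a serializer with the registry above (the 'Widget' model and its serializer are hypothetical, shown only to illustrate the decorator):

# Hypothetical serializer - once registered, importable via model_type='widget'
from importer.mixins import DataImportSerializerMixin
from importer.registry import register_importer
from InvenTree.serializers import InvenTreeModelSerializer

@register_importer()
class WidgetSerializer(DataImportSerializerMixin, InvenTreeModelSerializer):
    """Serializer which the registry maps from the lower-cased model name."""

    class Meta:
        model = Widget
        fields = ['pk', 'name', 'description']
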
216
src/backend/InvenTree/importer/serializers.py
Normal file
@ -0,0 +1,216 @@
"""API serializers for the importer app."""

import json

from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

from rest_framework import serializers

import importer.models
import importer.registry
from InvenTree.serializers import (
    InvenTreeAttachmentSerializerField,
    InvenTreeModelSerializer,
    UserSerializer,
)


class DataImportColumnMapSerializer(InvenTreeModelSerializer):
    """Serializer for the DataImportColumnMap model."""

    class Meta:
        """Meta class options for the serializer."""

        model = importer.models.DataImportColumnMap
        fields = ['pk', 'session', 'column', 'field', 'label', 'description']
        read_only_fields = ['field', 'session']

    label = serializers.CharField(read_only=True)
    description = serializers.CharField(read_only=True)


class DataImportSessionSerializer(InvenTreeModelSerializer):
    """Serializer for the DataImportSession model."""

    class Meta:
        """Meta class options for the serializer."""

        model = importer.models.DataImportSession
        fields = [
            'pk',
            'timestamp',
            'data_file',
            'model_type',
            'available_fields',
            'status',
            'user',
            'user_detail',
            'columns',
            'column_mappings',
            'field_defaults',
            'field_overrides',
            'field_filters',
            'row_count',
            'completed_row_count',
        ]
        read_only_fields = ['pk', 'user', 'status', 'columns']

    def __init__(self, *args, **kwargs):
        """Override the constructor for the DataImportSession serializer."""
        super().__init__(*args, **kwargs)

        self.fields['model_type'].choices = importer.registry.supported_model_options()

    data_file = InvenTreeAttachmentSerializerField()

    model_type = serializers.ChoiceField(
        required=True,
        allow_blank=False,
        choices=importer.registry.supported_model_options(),
    )

    available_fields = serializers.JSONField(read_only=True)

    row_count = serializers.IntegerField(read_only=True)
    completed_row_count = serializers.IntegerField(read_only=True)

    column_mappings = DataImportColumnMapSerializer(many=True, read_only=True)

    user_detail = UserSerializer(source='user', read_only=True, many=False)

    def validate_field_defaults(self, defaults):
        """De-stringify the field defaults."""
        if defaults is None:
            return None

        if type(defaults) is not dict:
            try:
                defaults = json.loads(str(defaults))
            except json.JSONDecodeError:
                raise ValidationError(_('Invalid field defaults'))

        return defaults

    def validate_field_overrides(self, overrides):
        """De-stringify the field overrides."""
        if overrides is None:
            return None

        if type(overrides) is not dict:
            try:
                overrides = json.loads(str(overrides))
            except json.JSONDecodeError:
                raise ValidationError(_('Invalid field overrides'))

        return overrides

    def validate_field_filters(self, filters):
        """De-stringify the field filters."""
        if filters is None:
            return None

        if type(filters) is not dict:
            try:
                filters = json.loads(str(filters))
            except json.JSONDecodeError:
                raise ValidationError(_('Invalid field filters'))

        return filters

    def create(self, validated_data):
        """Override create method for this serializer.

        Attach user information based on provided session data.
        """
        session = super().create(validated_data)

        request = self.context.get('request', None)

        if request:
            session.user = request.user
            session.save()

        return session


class DataImportRowSerializer(InvenTreeModelSerializer):
    """Serializer for the DataImportRow model."""

    class Meta:
        """Meta class options for the serializer."""

        model = importer.models.DataImportRow
        fields = [
            'pk',
            'session',
            'row_index',
            'row_data',
            'data',
            'errors',
            'valid',
            'complete',
        ]

        read_only_fields = [
            'pk',
            'session',
            'row_index',
            'row_data',
            'errors',
            'valid',
            'complete',
        ]


class DataImportAcceptRowSerializer(serializers.Serializer):
    """Serializer for accepting rows of data."""

    class Meta:
        """Serializer meta options."""

        fields = ['rows']

    rows = serializers.PrimaryKeyRelatedField(
        queryset=importer.models.DataImportRow.objects.all(),
        many=True,
        required=True,
        label=_('Rows'),
        help_text=_('List of row IDs to accept'),
    )

    def validate_rows(self, rows):
        """Ensure that the provided rows are valid.

        - Row must point to the same import session
        - Row must contain valid data
        - Row must not have already been completed
        """
        session = self.context.get('session', None)

        if not rows or len(rows) == 0:
            raise ValidationError(_('No rows provided'))

        for row in rows:
            if row.session != session:
                raise ValidationError(_('Row does not belong to this session'))

            if not row.valid:
                raise ValidationError(_('Row contains invalid data'))

            if row.complete:
                raise ValidationError(_('Row has already been completed'))

        return rows

    def save(self):
        """Complete the provided rows."""
        rows = self.validated_data['rows']

        for row in rows:
            row.validate(commit=True)

        if session := self.context.get('session', None):
            session.check_complete()

        return rows
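
A minimal sketch of how the accept-rows serializer above is typically driven (the endpoint path and row IDs are illustrative, not defined in this diff):

# Hypothetical request flow - accept three previously-validated rows
payload = {'rows': [1, 2, 3]}

# response = client.post('/api/importer/session/1/accept_rows/', payload)
# Each accepted row is re-validated with commit=True, which saves it via the
# registered serializer and then calls session.check_complete().
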
19
src/backend/InvenTree/importer/status_codes.py
Normal file
@ -0,0 +1,19 @@
"""Status codes for common model types."""

from django.utils.translation import gettext_lazy as _

from generic.states import StatusCode


class DataImportStatusCode(StatusCode):
    """Defines a set of status codes for a DataImportSession."""

    INITIAL = 0, _('Initializing'), 'secondary'  # Import session has been created
    MAPPING = 10, _('Mapping Columns'), 'primary'  # Import fields are being mapped
    IMPORTING = 20, _('Importing Data'), 'primary'  # Data is being imported
    PROCESSING = (
        30,
        _('Processing Data'),
        'primary',
    )  # Data is being processed by the user
    COMPLETE = 40, _('Complete'), 'success'  # Import has been completed
53
src/backend/InvenTree/importer/tasks.py
Normal file
@ -0,0 +1,53 @@
"""Task definitions for the 'importer' app."""

import logging
from datetime import timedelta

import InvenTree.helpers
import InvenTree.tasks

logger = logging.getLogger('inventree')


def import_data(session_id: int):
    """Load data from the provided file.

    Attempt to load data from the provided file, and potentially handle any errors.
    """
    import importer.models
    import importer.operations
    import importer.status_codes

    try:
        session = importer.models.DataImportSession.objects.get(pk=session_id)
        logger.info("Loading data from session ID '%s'", session_id)
        session.import_data()
    except (ValueError, importer.models.DataImportSession.DoesNotExist):
        logger.error("Data import session with ID '%s' does not exist", session_id)
        return


@InvenTree.tasks.scheduled_task(InvenTree.tasks.ScheduledTask.DAILY)
def cleanup_import_sessions():
    """Periodically remove old import sessions.

    Every 5 days, remove any importer sessions that are more than 5 days old.
    """
    CLEANUP_DAYS = 5

    import importer.models

    if not InvenTree.tasks.check_daily_holdoff('cleanup_import_sessions', CLEANUP_DAYS):
        return

    logger.info('Cleaning old data import sessions')

    before = InvenTree.helpers.current_date() - timedelta(days=CLEANUP_DAYS)

    sessions = importer.models.DataImportSession.objects.filter(timestamp__lte=before)

    if sessions.count() > 0:
        logger.info('Deleting %s old data import sessions', sessions.count())
        sessions.delete()

    InvenTree.tasks.record_task_success('cleanup_import_sessions')
13
src/backend/InvenTree/importer/test_data/companies.csv
Normal file
@ -0,0 +1,13 @@
ID,Company name,Company description,Website,Phone number,Address,Email,Currency,Contact,Link,Image,Active,Is customer,Is manufacturer,Is supplier,Notes,Parts supplied,Parts manufactured,Address count
3,Arrow,Arrow Electronics,https://www.arrow.com/,,"70680 Shannon Rapid Apt. 570, 96124, Jenniferport, Arkansas, Holy See (Vatican City State)",,AUD,,,/media/company_images/company_3_img.jpg,True,False,False,True,,60,0,2
1,DigiKey,DigiKey Electronics,https://www.digikey.com/,,"04964 Cox View Suite 815, 94832, Wesleyport, Delaware, Bolivia",,USD,,,/media/company_images/company_1_img.jpg,True,False,False,True,,200,0,2
41,Future,Electronic components distributor,https://www.futureelectronics.com/,,"Wogan Terrace 79, 20157, Teasdale, Lebanon",,USD,,,/media/company_images/company_41_img.png,True,False,False,True,,60,0,4
39,LCSC,Electronic components distributor,https://lcsc.com/,,"77673 Bishop Turnpike, 74969, North Cheryl, Hawaii, Portugal",,USD,,,/media/company_images/company_39_img.webp,True,False,False,True,,60,0,2
38,McMaster-Carr,Supplier of mechanical components,https://www.mcmaster.com/,,"Schroeders Avenue 56, 8014, Sylvanite, Cayman Islands",,USD,,,/media/company_images/company_38_img.png,True,False,False,True,,240,0,1
2,Mouser,Mouser Electronics,https://mouser.com/,,"Ashford Street 71, 24165, Leland, Jamaica",,AUD,,,/media/company_images/company_2_img.jpg,True,False,False,True,,61,0,2
40,Newark,Online distributor of electronic components,https://www.newark.com/,,"Dekoven Court 3, 18301, Emison, Tuvalu",,USD,,,/media/company_images/company_40_img.png,True,False,False,True,,60,0,1
36,Paint by Numbers,Supplier of high quality paint,,,"Orient Avenue 59, 18609, Corinne, Alabama, France, Metropolitan",,EUR,Pippy Painter,,/media/company_images/company_36_img.jpg,True,False,False,True,,15,0,1
43,PCBWOY,PCB fabricator / supplier,,,"McKibben Street 77, 12370, Russellville, Benin",,USD,,,/media/company_images/company_43_img.png,True,False,False,True,,1,0,2
29,Texas Instruments,,https://www.ti.com/,,"264 David Villages, 97718, Lake Michael, New Mexico, Kenya",,USD,,,/media/company_images/company_29_img.jpg,True,False,True,True,,0,1,2
44,Wire-E-Coyote,American wire supplier,,,"Fountain Avenue 74, 12115, Gulf, Seychelles",,USD,,,,True,False,False,True,,5,0,3
42,Wirey,Supplier of wire,,,"Preston Court 80, 4462, Manila, Russian Federation",,USD,,,/media/company_images/company_42_img.jpg,True,False,False,True,,11,0,2
64
src/backend/InvenTree/importer/tests.py
Normal file
@ -0,0 +1,64 @@
"""Unit tests for the 'importer' app."""

import os

from django.core.files.base import ContentFile

from importer.models import DataImportSession
from InvenTree.unit_test import InvenTreeTestCase


class ImporterTest(InvenTreeTestCase):
    """Basic tests for file imports."""

    def test_import_session(self):
        """Test creation of a data import session."""
        from company.models import Company

        n = Company.objects.count()

        fn = os.path.join(os.path.dirname(__file__), 'test_data', 'companies.csv')

        with open(fn, 'r') as input_file:
            data = input_file.read()

        session = DataImportSession.objects.create(
            data_file=ContentFile(data, 'companies.csv'), model_type='company'
        )

        session.extract_columns()

        self.assertEqual(session.column_mappings.count(), 14)

        # Check some of the field mappings
        for field, col in [
            ('website', 'Website'),
            ('is_customer', 'Is customer'),
            ('phone', 'Phone number'),
            ('description', 'Company description'),
            ('active', 'Active'),
        ]:
            self.assertTrue(
                session.column_mappings.filter(field=field, column=col).exists()
            )

        # Run the data import
        session.import_data()
        self.assertEqual(session.rows.count(), 12)

        # Check that some data has been imported
        for row in session.rows.all():
            self.assertIsNotNone(row.data.get('name', None))
            self.assertTrue(row.valid)

            row.validate(commit=True)
            self.assertTrue(row.complete)

        self.assertEqual(session.completed_row_count, 12)

        # Check that the new companies have been created
        self.assertEqual(n + 12, Company.objects.count())

    def test_field_defaults(self):
        """Test default field values."""
        ...
53
src/backend/InvenTree/importer/validators.py
Normal file
@ -0,0 +1,53 @@
"""Custom validation routines for the 'importer' app."""

import json

from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

# Define maximum limits for imported file data
IMPORTER_MAX_FILE_SIZE = 32 * 1024 * 1024  # 32 MB
IMPORTER_MAX_ROWS = 5000
IMPORTER_MAX_COLS = 1000


def validate_data_file(data_file):
    """Validate the provided data file."""
    import importer.operations

    filesize = data_file.size

    if filesize > IMPORTER_MAX_FILE_SIZE:
        raise ValidationError(_('Data file exceeds maximum size limit'))

    dataset = importer.operations.load_data_file(data_file)

    if not dataset.headers or len(dataset.headers) == 0:
        raise ValidationError(_('Data file contains no headers'))

    if len(dataset.headers) > IMPORTER_MAX_COLS:
        raise ValidationError(_('Data file contains too many columns'))

    if len(dataset) > IMPORTER_MAX_ROWS:
        raise ValidationError(_('Data file contains too many rows'))


def validate_importer_model_type(value):
    """Validate that the given model type is supported for importing."""
    from importer.registry import supported_models

    if value not in supported_models().keys():
        raise ValidationError(f"Unsupported model type '{value}'")


def validate_field_defaults(value):
    """Validate that the provided value is a valid dict."""
    if value is None:
        return

    if type(value) is not dict:
        # OK if we can parse it as JSON
        try:
            value = json.loads(value)
        except (json.JSONDecodeError, TypeError):
            raise ValidationError(_('Value must be a valid dictionary object'))

        # The parsed value must itself be a dict (e.g. a JSON list is rejected)
        if type(value) is not dict:
            raise ValidationError(_('Value must be a valid dictionary object'))
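
A short sketch of the dict validator above: plain dicts pass, JSON strings that parse to a dict pass, anything else raises (inputs are illustrative):

validate_field_defaults({'currency': 'USD'})     # passes - already a dict
validate_field_defaults('{"currency": "USD"}')   # passes - JSON string for a dict
validate_field_defaults('not json')              # raises ValidationError
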
Some files were not shown because too many files have changed in this diff