2022-06-05 22:56:52 +00:00
|
|
|
"""Tasks for automating certain actions and interacting with InvenTree from the CLI."""
|
|
|
|
|
2021-04-25 01:29:07 +00:00
|
|
|
import json
|
2022-05-20 15:24:51 +00:00
|
|
|
import os
|
2021-11-24 22:07:48 +00:00
|
|
|
import pathlib
|
2021-11-24 23:46:23 +00:00
|
|
|
import re
|
2022-07-31 13:16:58 +00:00
|
|
|
import shutil
|
2023-07-20 00:12:08 +00:00
|
|
|
import subprocess
|
2022-05-20 15:24:51 +00:00
|
|
|
import sys
|
2022-07-27 00:42:34 +00:00
|
|
|
from pathlib import Path
|
2023-05-24 06:34:36 +00:00
|
|
|
from platform import python_version
|
2020-08-21 10:39:43 +00:00
|
|
|
|
2022-05-15 19:01:55 +00:00
|
|
|
from invoke import task
|
2021-04-11 04:05:55 +00:00
|
|
|
|
2021-04-01 13:06:17 +00:00
|
|
|
|
2023-12-13 11:20:50 +00:00
|
|
|
def checkPythonVersion():
    """Check that the installed python version meets minimum requirements.

    If the python version is not sufficient, exits with a non-zero exit code.
    """
    REQ_MAJOR = 3
    REQ_MINOR = 9

    # Human-readable version string (e.g. '3.11.4') for the error message
    version = sys.version.split(' ')[0]

    if sys.version_info[:2] < (REQ_MAJOR, REQ_MINOR):
        print(f'The installed python version ({version}) is not supported!')
        print(f'InvenTree requires Python {REQ_MAJOR}.{REQ_MINOR} or above')
        sys.exit(1)
|
|
|
|
|
|
|
|
|
|
|
|
# Validate the python version immediately, both when this file is executed
# directly and when invoke imports it as the 'tasks' module
if __name__ in ['__main__', 'tasks']:
    checkPythonVersion()
|
|
|
|
|
|
|
|
|
2020-08-21 11:08:04 +00:00
|
|
|
def apps():
    """Returns a list of installed apps."""
    # Django app labels, in the order they are typically referenced
    app_names = (
        'build',
        'common',
        'company',
        'order',
        'part',
        'report',
        'stock',
        'users',
        'plugin',
        'InvenTree',
        'generic',
        'machine',
        'web',
    )
    return list(app_names)
|
|
|
|
|
2021-04-01 13:06:17 +00:00
|
|
|
|
2024-01-15 14:26:57 +00:00
|
|
|
def content_excludes(
    allow_auth: bool = True,
    allow_tokens: bool = True,
    allow_plugins: bool = True,
    allow_sso: bool = True,
):
    """Return a string of '--exclude' flags for the dumpdata/loaddata commands.

    Arguments:
        allow_auth (bool): Allow user / group data to be exported/imported
        allow_tokens (bool): Allow tokens to be exported/imported
        allow_plugins (bool): Allow plugin information to be exported/imported
        allow_sso (bool): Allow SSO tokens to be exported/imported
    """
    # Content types which are always excluded
    excludes = [
        'contenttypes',
        'auth.permission',
        'error_report.error',
        'admin.logentry',
        'django_q.schedule',
        'django_q.task',
        'django_q.ormq',
        'exchange.rate',
        'exchange.exchangebackend',
        'common.notificationentry',
        'common.notificationmessage',
        'user_sessions.session',
        'report.labeloutput',
        'report.reportoutput',
    ]

    # Content types which are excluded only when the matching flag is disabled
    conditional = [
        (allow_auth, ['auth.group', 'auth.user']),
        (allow_tokens, ['users.apitoken']),
        (allow_plugins, ['plugin.pluginconfig', 'plugin.pluginsetting']),
        (allow_sso, ['socialaccount.socialapp', 'socialaccount.socialtoken']),
    ]

    for allowed, models in conditional:
        if not allowed:
            excludes.extend(models)

    return ' '.join(f'--exclude {e}' for e in excludes)
|
2022-06-11 00:07:57 +00:00
|
|
|
|
|
|
|
|
2022-07-27 00:42:34 +00:00
|
|
|
def localDir() -> Path:
    """Returns the directory of *THIS* file.

    Used to ensure that the various scripts always run
    in the correct directory.
    """
    here = Path(__file__).resolve()
    return here.parent
|
2020-08-21 10:39:43 +00:00
|
|
|
|
2021-04-01 13:06:17 +00:00
|
|
|
|
2020-08-21 10:39:43 +00:00
|
|
|
def managePyDir():
    """Returns the directory of the manage.py file."""
    return localDir() / 'src' / 'backend' / 'InvenTree'
|
2020-08-21 10:39:43 +00:00
|
|
|
|
2021-04-01 13:06:17 +00:00
|
|
|
|
2020-08-21 10:39:43 +00:00
|
|
|
def managePyPath():
    """Return the path of the manage.py file."""
    return managePyDir() / 'manage.py'
|
2020-08-21 10:39:43 +00:00
|
|
|
|
2021-04-01 13:06:17 +00:00
|
|
|
|
2022-06-05 22:56:52 +00:00
|
|
|
def manage(c, cmd, pty: bool = False):
    """Runs a given command against django's "manage.py" script.

    Args:
        c: Command line context.
        cmd: Django command to run.
        pty (bool, optional): Run an interactive session. Defaults to False.
    """
    # Always execute from the directory containing manage.py
    c.run(f'cd "{managePyDir()}" && python3 manage.py {cmd}', pty=pty)
|
2020-08-21 10:39:43 +00:00
|
|
|
|
2022-05-20 11:37:12 +00:00
|
|
|
|
2023-07-18 12:45:49 +00:00
|
|
|
def yarn(c, cmd, pty: bool = False):
    """Runs a given command against the yarn package manager.

    Args:
        c: Command line context.
        cmd: Yarn command to run.
        pty (bool, optional): Run an interactive session. Defaults to False.
    """
    # Yarn commands run from the frontend source directory
    frontend_dir = localDir().joinpath('src', 'frontend')
    c.run(f'cd "{frontend_dir}" && {cmd}', pty=pty)
|
|
|
|
|
|
|
|
|
2023-07-20 00:12:08 +00:00
|
|
|
def node_available(versions: bool = False, bypass_yarn: bool = False):
    """Checks if the frontend environment (ie node and yarn in bash) is available."""

    def query(cmd):
        # Run the command in a shell; return stripped stdout, or None if it fails
        try:
            output = subprocess.check_output(
                [cmd], stderr=subprocess.STDOUT, shell=True
            )
            return str(output, encoding='utf-8').strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return None

    yarn_version = query('yarn --version')
    node_version = query('node --version')

    # Either yarn is available or we don't care about yarn
    yarn_passes = bypass_yarn or yarn_version

    # Print a warning if node is available but yarn is not
    if node_version and not yarn_passes:
        print(
            'Node is available but yarn is not. Install yarn if you wish to build the frontend.'
        )

    # Return the result
    result = yarn_passes and node_version
    if versions:
        return result, node_version, yarn_version
    return result
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
|
2023-04-18 13:08:36 +00:00
|
|
|
def check_file_existance(filename: str, overwrite: bool = False):
    """Checks if a file exists and asks the user if it should be overwritten.

    Exits with a non-zero code if the user declines to overwrite an existing file.

    Args:
        filename (str): Name of the file to check.
        overwrite (bool, optional): Overwrite the file without asking. Defaults to False.
    """
    # Nothing to do if the file is absent, or overwriting was pre-approved
    if not Path(filename).is_file() or overwrite is not False:
        return

    answer = input(
        'Warning: file already exists. Do you want to overwrite? [y/N]: '
    )

    if str(answer).strip().lower() not in ['y', 'yes']:
        print('Cancelled export operation')
        sys.exit(1)
|
|
|
|
|
|
|
|
|
2022-06-11 00:07:57 +00:00
|
|
|
# Install tasks
|
2024-02-27 14:06:19 +00:00
|
|
|
@task(help={'uv': 'Use UV (experimental package manager)'})
def plugins(c, uv=False):
    """Installs all plugins as specified in 'plugins.txt'."""
    from src.backend.InvenTree.InvenTree.config import get_plugin_file

    plugin_file = get_plugin_file()

    print(f"Installing plugin packages from '{plugin_file}'")

    # Install the plugins
    if uv:
        # Bootstrap the uv package manager, then install with it
        c.run('pip3 install --no-cache-dir --disable-pip-version-check uv')
        c.run(f"uv pip install -r '{plugin_file}'")
    else:
        c.run(f"pip3 install --disable-pip-version-check -U -r '{plugin_file}'")
|
2022-01-06 01:25:07 +00:00
|
|
|
|
2022-05-20 11:37:12 +00:00
|
|
|
|
2024-02-27 14:06:19 +00:00
|
|
|
@task(help={'uv': 'Use UV package manager (experimental)'})
def install(c, uv=False):
    """Installs required python packages."""
    INSTALL_FILE = 'src/backend/requirements.txt'

    print(f"Installing required python packages from '{INSTALL_FILE}'")

    if not Path(INSTALL_FILE).is_file():
        raise FileNotFoundError(f"Requirements file '{INSTALL_FILE}' not found")

    # Install required Python packages with PIP
    if uv:
        # Bootstrap uv first, then install requirements via uv
        c.run(
            'pip3 install --no-cache-dir --disable-pip-version-check -U uv setuptools'
        )
        c.run(f'uv pip install -U --require-hashes -r {INSTALL_FILE}')
    else:
        c.run(
            'pip3 install --no-cache-dir --disable-pip-version-check -U pip setuptools'
        )
        c.run(
            f'pip3 install --no-cache-dir --disable-pip-version-check -U --require-hashes -r {INSTALL_FILE}'
        )

    # Run plugins install
    plugins(c, uv=uv)

    # Compile license information
    lic_path = managePyDir().joinpath('InvenTree', 'licenses.txt')
    c.run(
        f'pip-licenses --format=json --with-license-file --no-license-path > {lic_path}'
    )
|
|
|
|
|
2022-05-20 11:37:12 +00:00
|
|
|
|
2022-07-29 04:58:54 +00:00
|
|
|
@task(help={'tests': 'Set up test dataset at the end'})
def setup_dev(c, tests=False):
    """Sets up everything needed for the dev environment.

    Installs the dev requirements, configures pre-commit hooks,
    and (optionally) sets up the test dataset.
    """
    print("Installing required python packages from 'src/backend/requirements-dev.txt'")

    # Install required Python packages with PIP
    c.run('pip3 install -U --require-hashes -r src/backend/requirements-dev.txt')

    # Install pre-commit hook
    print('Installing pre-commit for checks before git commits...')
    c.run('pre-commit install')

    # Update all the hooks
    c.run('pre-commit autoupdate')
    print('pre-commit set up is done...')

    # Set up test-data if flag is set
    if tests:
        setup_test(c)
|
2022-05-15 22:19:37 +00:00
|
|
|
|
2022-05-20 11:37:12 +00:00
|
|
|
|
2022-06-11 00:07:57 +00:00
|
|
|
# Setup / maintenance tasks
|
2020-08-21 11:24:02 +00:00
|
|
|
@task
def superuser(c):
    """Create a superuser/admin account for the database."""
    # Interactive session: Django prompts for username / email / password
    manage(c, 'createsuperuser', pty=True)
|
2020-08-21 11:24:02 +00:00
|
|
|
|
2021-08-17 23:52:27 +00:00
|
|
|
|
2021-06-18 11:53:15 +00:00
|
|
|
@task
def rebuild_models(c):
    """Rebuild database models with MPTT structures."""
    # Delegates to the custom 'rebuild_models' management command
    manage(c, 'rebuild_models', pty=True)
|
2021-06-18 11:53:15 +00:00
|
|
|
|
2021-11-19 20:50:41 +00:00
|
|
|
|
2021-10-04 21:05:26 +00:00
|
|
|
@task
def rebuild_thumbnails(c):
    """Rebuild missing image thumbnails."""
    # Delegates to the custom 'rebuild_thumbnails' management command
    manage(c, 'rebuild_thumbnails', pty=True)
|
2021-08-17 23:52:27 +00:00
|
|
|
|
2021-11-19 20:50:41 +00:00
|
|
|
|
2021-07-31 23:06:17 +00:00
|
|
|
@task
def clean_settings(c):
    """Clean the setting tables of old settings."""
    # Delegates to the custom 'clean_settings' management command
    manage(c, 'clean_settings')
|
2021-07-31 23:06:17 +00:00
|
|
|
|
2021-11-19 20:50:41 +00:00
|
|
|
|
2023-06-03 14:04:52 +00:00
|
|
|
@task(help={'mail': "mail of the user who's MFA should be disabled"})
def remove_mfa(c, mail=''):
    """Remove MFA for a user.

    Args:
        c: Command line context.
        mail (str): Email address identifying the user whose MFA should be removed.
    """
    if not mail:
        print('You must provide a users mail')
        # Bug fix: previously fell through and ran 'remove_mfa' with an empty
        # argument; bail out instead
        return

    manage(c, f'remove_mfa {mail}')
|
2021-12-11 22:07:37 +00:00
|
|
|
|
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
@task(help={'frontend': 'Build the frontend'})
def static(c, frontend=False):
    """Copies required static files to the STATIC_ROOT directory, as per Django requirements."""
    manage(c, 'prerender')

    # Optionally rebuild the frontend first (requires a working node toolchain)
    if frontend and node_available():
        frontend_trans(c)
        frontend_build(c)

    print('Collecting static files...')
    manage(c, 'collectstatic --no-input --clear --verbosity 0')
|
2020-08-21 11:08:04 +00:00
|
|
|
|
|
|
|
|
2021-08-19 21:36:54 +00:00
|
|
|
@task
def translate_stats(c):
    """Collect translation stats.

    The file generated from this is needed for the UI.
    """
    # Recompile the translation files (.mo)
    # We do not run 'invoke translate' here, as that will touch the source (.po) files too!
    try:
        manage(c, 'compilemessages', pty=True)
    except Exception:
        print('WARNING: Translation files could not be compiled:')

    # Run the stats collection script directly
    script = Path('src', 'backend', 'InvenTree', 'script', 'translation_stats.py')
    c.run(f'python3 {script}')
|
2021-08-19 21:36:54 +00:00
|
|
|
|
|
|
|
|
2023-07-19 00:43:40 +00:00
|
|
|
@task(post=[translate_stats])
def translate(c, ignore_static=False, no_frontend=False):
    """Rebuild translation source files. Advanced use only!

    Note: This command should not be used on a local install,
    it is performed as part of the InvenTree translation toolchain.
    """
    # Translate applicable .py / .html / .js files
    manage(c, 'makemessages --all -e py,html,js --no-wrap')
    manage(c, 'compilemessages')

    # Rebuild frontend translations too, if the node toolchain is available
    if not no_frontend and node_available():
        frontend_install(c)
        frontend_trans(c)
        frontend_build(c)

    # Update static files
    if not ignore_static:
        static(c)
|
2023-07-19 00:43:40 +00:00
|
|
|
|
2021-08-17 08:22:07 +00:00
|
|
|
|
2024-04-22 13:01:10 +00:00
|
|
|
@task(
    help={
        'clean': 'Clean up old backup files',
        'path': 'Specify path for generated backup files (leave blank for default path)',
    }
)
def backup(c, clean=False, path=None):
    """Backup the database and media files."""
    print('Backing up InvenTree database...')

    # Assemble shared options for both backup commands
    options = ['--noinput', '--compress', '-v 2']

    if path:
        # Write backup files to the specified output directory
        options.append(f'-O {path}')

    if clean:
        options.append('--clean')

    args = ' '.join(options)

    manage(c, f'dbbackup {args}')
    print('Backing up InvenTree media files...')
    manage(c, f'mediabackup {args}')
|
|
|
|
|
|
|
|
|
|
|
|
@task(
    help={
        'path': 'Specify path to locate backup files (leave blank for default path)',
        'db_file': 'Specify filename of compressed database archive (leave blank to use most recent backup)',
        'media_file': 'Specify filename of compressed media archive (leave blank to use most recent backup)',
        'ignore_media': 'Do not import media archive (database restore only)',
        'ignore_database': 'Do not import database archive (media restore only)',
    }
)
def restore(
    c,
    path=None,
    db_file=None,
    media_file=None,
    ignore_media=False,
    ignore_database=False,
):
    """Restore the database and media files."""
    # Options shared by both restore commands
    shared = '--no-input --uncompress -v 2'

    if path:
        # Look for backup archives in the specified input directory
        shared += f' -I {path}'

    if ignore_database:
        print('Skipping database archive...')
    else:
        print('Restoring InvenTree database')
        db_cmd = f'dbrestore {shared}'

        if db_file:
            db_cmd += f' -i {db_file}'

        manage(c, db_cmd)

    if ignore_media:
        print('Skipping media restore...')
    else:
        print('Restoring InvenTree media files')
        media_cmd = f'mediarestore {shared}'

        if media_file:
            media_cmd += f' -i {media_file}'

        manage(c, media_cmd)
|
2022-10-16 13:09:31 +00:00
|
|
|
|
|
|
|
|
2023-01-09 20:54:25 +00:00
|
|
|
@task(post=[rebuild_models, rebuild_thumbnails])
def migrate(c):
    """Performs database migrations.

    This is a critical step if the database schema have been altered!
    """
    print('Running InvenTree database migrations...')
    print('========================================')

    # Run custom management command which wraps migrations in "maintenance mode"
    manage(c, 'makemigrations')
    manage(c, 'runmigrations', pty=True)
    # Also synchronize any apps which do not use migrations
    manage(c, 'migrate --run-syncdb')

    print('========================================')
    print('InvenTree database migrations completed!')
|
2022-06-11 00:07:57 +00:00
|
|
|
|
|
|
|
|
2023-01-09 20:54:25 +00:00
|
|
|
@task(
    post=[clean_settings, translate_stats],
    help={
        'skip_backup': 'Skip database backup step (advanced users)',
        'frontend': 'Force frontend compilation/download step (ignores INVENTREE_DOCKER)',
        'no_frontend': 'Skip frontend compilation/download step',
        'skip_static': 'Skip static file collection step',
        'uv': 'Use UV (experimental package manager)',
    },
)
def update(
    c,
    skip_backup: bool = False,
    frontend: bool = False,
    no_frontend: bool = False,
    skip_static: bool = False,
    uv: bool = False,
):
    """Update InvenTree installation.

    This command should be invoked after source code has been updated,
    e.g. downloading new code from GitHub.

    The following tasks are performed, in order:

    - install
    - backup (optional)
    - migrate
    - frontend_compile or frontend_download (optional)
    - static (optional)
    - clean_settings
    - translate_stats
    """
    # Ensure required components are installed
    install(c, uv=uv)

    # Take a backup before migrating, unless explicitly skipped
    if not skip_backup:
        backup(c)

    # Perform database migrations
    migrate(c)

    # Stop here if we are not building/downloading the frontend
    # If:
    # - INVENTREE_DOCKER is set (by the docker image eg.) and not overridden by `--frontend` flag
    # - `--no-frontend` flag is set
    if (os.environ.get('INVENTREE_DOCKER', False) and not frontend) or no_frontend:
        print('Skipping frontend update!')
        # Normalize the flags so the 'static' step below sees a consistent state
        frontend = False
        no_frontend = True
    else:
        print('Updating frontend...')
        # Decide if we should compile the frontend or try to download it
        if node_available(bypass_yarn=True):
            frontend_compile(c)
        else:
            frontend_download(c)

    if not skip_static:
        static(c, frontend=not no_frontend)
|
2024-01-16 20:10:42 +00:00
|
|
|
|
2021-08-17 08:22:07 +00:00
|
|
|
|
2022-06-11 00:07:57 +00:00
|
|
|
# Data tasks
|
2024-01-11 00:28:58 +00:00
|
|
|
@task(
    help={
        'filename': "Output filename (default = 'data.json')",
        'overwrite': 'Overwrite existing files without asking first (default = False)',
        'include_permissions': 'Include user and group permissions in the output file (default = False)',
        'include_tokens': 'Include API tokens in the output file (default = False)',
        'exclude_plugins': 'Exclude plugin data from the output file (default = False)',
        'include_sso': 'Include SSO token data in the output file (default = False)',
        'retain_temp': 'Retain temporary files (containing permissions) at end of process (default = False)',
    }
)
def export_records(
    c,
    filename='data.json',
    overwrite=False,
    include_permissions=False,
    include_tokens=False,
    exclude_plugins=False,
    include_sso=False,
    retain_temp=False,
):
    """Export all database records to a file.

    Write data to the file defined by filename.
    If --overwrite is not set, the user will be prompted about overwriting an existing file.
    If --include-permissions is not set, user / group permissions are stripped from the output.

    A temporary file (named filename.tmp, containing the raw dump including
    permissions) is created during the process; it is deleted at the end
    unless --retain-temp is set.
    """
    # Get an absolute path to the file
    if not os.path.isabs(filename):
        filename = localDir().joinpath(filename).resolve()

    # Bug fix: the status message and temporary filenames previously did not
    # interpolate the 'filename' variable
    print(f"Exporting database records to file '{filename}'")

    check_file_existance(filename, overwrite)

    # Temporary file which includes user/group permission data
    tmpfile = f'{filename}.tmp'

    excludes = content_excludes(
        allow_tokens=include_tokens,
        allow_plugins=not exclude_plugins,
        allow_sso=include_sso,
    )

    cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile}' {excludes}"

    # Dump data to temporary file
    manage(c, cmd, pty=True)

    print('Running data post-processing step...')

    # Post-process the file, to remove any "permissions" specified for a user or group
    with open(tmpfile, 'r') as f_in:
        data = json.loads(f_in.read())

    data_out = []

    # Bug fix: previously the entire loop was skipped when include_permissions
    # was True, resulting in an empty export file. Records are now always
    # written; permissions are only stripped when not explicitly included.
    for entry in data:
        model_name = entry.get('model', None)

        # Ignore any temporary settings (start with underscore)
        if model_name in ['common.inventreesetting', 'common.inventreeusersetting']:
            if entry['fields'].get('key', '').startswith('_'):
                continue

        if include_permissions is False:
            # Strip user / group permission data from the export
            if model_name == 'auth.group':
                entry['fields']['permissions'] = []

            if model_name == 'auth.user':
                entry['fields']['user_permissions'] = []

        data_out.append(entry)

    # Write the processed data to file
    with open(filename, 'w') as f_out:
        f_out.write(json.dumps(data_out, indent=2))

    print('Data export completed')

    if not retain_temp:
        print('Removing temporary files')
        os.remove(tmpfile)
|
|
|
|
|
2021-04-25 01:29:07 +00:00
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
@task(
    help={
        'filename': 'Input filename',
        'clear': 'Clear existing data before import',
        'retain_temp': 'Retain temporary files at end of process (default = False)',
    },
    post=[rebuild_models, rebuild_thumbnails],
)
def import_records(
    c, filename='data.json', clear: bool = False, retain_temp: bool = False
):
    """Import database records from a file.

    The data is split into two temporary files (filename.auth.json and
    filename.data.json) so that user / group auth data can be loaded first;
    the users.owner model has a ContentType foreign key which requires this.
    """
    # Get an absolute path to the supplied filename
    if not os.path.isabs(filename):
        filename = localDir().joinpath(filename)

    if not os.path.exists(filename):
        # Bug fix: the error message previously did not interpolate 'filename'
        print(f"Error: File '{filename}' does not exist")
        sys.exit(1)

    if clear:
        delete_data(c, force=True)

    print(f"Importing database records from '{filename}'")

    # We need to load 'auth' data (users / groups) *first*
    # This is due to the users.owner model, which has a ContentType foreign key
    # Bug fix: the temporary filenames previously did not derive from 'filename'
    authfile = f'{filename}.auth.json'

    # Pre-process the data, to remove any "permissions" specified for a user or group
    datafile = f'{filename}.data.json'

    with open(filename, 'r') as f_in:
        try:
            data = json.loads(f_in.read())
        except json.JSONDecodeError as exc:
            print(f'Error: Failed to decode JSON file: {exc}')
            sys.exit(1)

    auth_data = []
    load_data = []

    for entry in data:
        if 'model' in entry:
            # Clear out any permissions specified for a group
            if entry['model'] == 'auth.group':
                entry['fields']['permissions'] = []

            # Clear out any permissions specified for a user
            if entry['model'] == 'auth.user':
                entry['fields']['user_permissions'] = []

            # Save auth data for later
            if entry['model'].startswith('auth.'):
                auth_data.append(entry)
            else:
                load_data.append(entry)
        else:
            print('Warning: Invalid entry in data file')
            print(entry)

    # Write the auth file data
    with open(authfile, 'w') as f_out:
        f_out.write(json.dumps(auth_data, indent=2))

    # Write the processed data to the tmp file
    with open(datafile, 'w') as f_out:
        f_out.write(json.dumps(load_data, indent=2))

    excludes = content_excludes(allow_auth=False)

    # Import auth models first
    print('Importing user auth data...')
    cmd = f"loaddata '{authfile}'"
    manage(c, cmd, pty=True)

    # Import everything else next
    print('Importing database records...')
    cmd = f"loaddata '{datafile}' -i {excludes}"

    manage(c, cmd, pty=True)

    if not retain_temp:
        print('Removing temporary files')
        os.remove(datafile)
        os.remove(authfile)

    print('Data import completed')
|
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
print('Data import completed')
|
2021-04-25 02:07:58 +00:00
|
|
|
|
2021-06-21 00:23:53 +00:00
|
|
|
|
|
|
|
@task
def delete_data(c, force=False):
    """Delete all database records!

    Warning: This will REALLY delete all records in the database!!
    """
    print('Deleting all data from InvenTree database...')

    # '--noinput' skips Django's interactive confirmation prompt
    flush_cmd = 'flush --noinput' if force else 'flush'
    manage(c, flush_cmd)
|
|
|
|
|
|
|
|
|
@task(post=[rebuild_models, rebuild_thumbnails])
def import_fixtures(c):
    """Import fixture data into the database.

    This command imports all existing test fixture data into the database.

    Warning:
    - Intended for testing / development only!
    - Running this command may overwrite existing database data!!
    - Don't say you were not warned...
    """
    # Fixture names grouped by the app that owns them; list order is the
    # order in which 'loaddata' processes them
    fixtures = [
        # Build model
        'build',
        # Common models
        'settings',
        # Company model
        'company',
        'price_breaks',
        'supplier_part',
        # Order model
        'order',
        # Part model
        'bom',
        'category',
        'params',
        'part',
        'test_templates',
        # Stock model
        'location',
        'stock_tests',
        'stock',
        # Users
        'users',
    ]

    manage(c, ' '.join(['loaddata', *fixtures]), pty=True)
|
|
|
|
|
2021-02-10 15:55:04 +00:00
|
|
|
|
2022-06-11 00:07:57 +00:00
|
|
|
# Execution tasks
|
@task
def wait(c):
    """Wait until the database connection is ready."""
    # Delegates to the custom 'wait_for_db' management command; its result
    # is propagated so callers can inspect the invoke run outcome
    return manage(c, 'wait_for_db')
|
2022-07-20 01:44:25 +00:00
|
|
|
|
|
|
|
|
@task(pre=[wait], help={'address': 'Server address:port (default=0.0.0.0:8000)'})
def gunicorn(c, address='0.0.0.0:8000'):
    """Launch a gunicorn webserver.

    Note: This server will not auto-reload in response to code changes.
    """
    # Use the gunicorn config shipped with the docker setup, binding to the
    # requested address and serving from the InvenTree source directory
    gunicorn_cmd = f'gunicorn -c ./docker/gunicorn.conf.py InvenTree.wsgi -b {address} --chdir ./InvenTree'
    c.run(gunicorn_cmd, pty=True)
|
|
|
|
|
|
|
|
|
@task(pre=[wait], help={'address': 'Server address:port (default=127.0.0.1:8000)'})
def server(c, address='127.0.0.1:8000'):
    """Launch a (development) server using Django's in-built webserver.

    Note: This is *not* sufficient for a production installation.
    """
    # Django's runserver auto-reloads on code changes - development only
    manage(c, f'runserver {address}', pty=True)
|
2021-08-28 10:59:41 +00:00
|
|
|
|
|
|
|
|
@task(pre=[wait])
def worker(c):
    """Run the InvenTree background worker process."""
    # 'qcluster' starts the django-q task cluster which processes
    # scheduled and offloaded background tasks
    manage(c, 'qcluster', pty=True)
|
|
|
|
|
|
|
|
|
|
|
|
@task
def render_js_files(c):
    """Render templated javascript files (used for static testing)."""
    # The 'ci_render_js' test case renders the templated JS files to disk
    # so they can be statically analyzed in CI
    manage(c, 'test InvenTree.ci_render_js')
|
2022-06-11 00:07:57 +00:00
|
|
|
|
|
|
|
|
@task(post=[translate_stats, static, server])
def test_translations(c):
    """Add a fictional language to test if each component is ready for translations.

    Creates an 'xx' dummy locale, replaces every translatable character with
    'x' (leaving format placeholders and tags intact), then compiles the
    message catalogs so the dummy language can be exercised in the UI.
    """
    import django

    from django.conf import settings

    # setup django
    base_path = Path.cwd()
    new_base_path = pathlib.Path('InvenTree').resolve()
    sys.path.append(str(new_base_path))
    os.chdir(new_base_path)
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'InvenTree.settings')
    django.setup()

    # Add language
    print('Add dummy language...')
    print('========================================')
    manage(c, 'makemessages -e py,html,js --no-wrap -l xx')

    # change translation
    print('Fill in dummy translations...')
    print('========================================')

    file_path = pathlib.Path(settings.LOCALE_PATHS[0], 'xx', 'LC_MESSAGES', 'django.po')
    # BUGFIX: this must be a Path (not a plain str) because .rename() is
    # called on it below - str has no 'rename' attribute
    new_file_path = pathlib.Path(str(file_path) + '_new')

    # compile regex
    reg = re.compile(
        r'[a-zA-Z0-9]{1}'  # match any single letter and number  # noqa: W504
        + r'(?![^{\(\<]*[}\)\>])'  # that is not inside curly brackets, brackets or a tag  # noqa: W504
        + r'(?<![^\%][^\(][)][a-z])'  # that is not a specially formatted variable with singles  # noqa: W504
        + r'(?![^\\][\n])'  # that is not a newline
    )
    last_string = ''

    # loop through input file lines
    with open(file_path, 'rt') as file_org:
        with open(new_file_path, 'wt') as file_new:
            for line in file_org:
                if line.startswith('msgstr "'):
                    # write output -> replace regex matches with x in the read in (multi)string
                    file_new.write(f'msgstr "{reg.sub("x", last_string[7:-2])}"\n')
                    last_string = ''  # reset (multi)string
                elif line.startswith('msgid "'):
                    last_string = (
                        last_string + line
                    )  # a new translatable string starts -> start append
                    file_new.write(line)
                else:
                    if last_string:
                        last_string = (
                            last_string + line
                        )  # a string is being read in -> continue appending
                    file_new.write(line)

    # change out translation files: keep the original as a backup, promote
    # the dummy-filled file to the live catalog
    file_path.rename(str(file_path) + '_old')
    new_file_path.rename(file_path)

    # compile languages
    print('Compile languages ...')
    print('========================================')
    manage(c, 'compilemessages')

    # reset cwd
    os.chdir(base_path)

    # set env flag so downstream tasks know the dummy language is active
    os.environ['TEST_TRANSLATIONS'] = 'True'
|
|
|
|
|
|
|
|
|
@task(
    help={
        'disable_pty': 'Disable PTY',
        'runtest': 'Specify which tests to run, in format <module>.<file>.<class>.<method>',
        'migrations': 'Run migration unit tests',
        'report': 'Display a report of slow tests',
        'coverage': 'Run code coverage analysis (requires coverage package)',
        'cui': 'Do not run CUI tests',
    }
)
def test(
    c,
    disable_pty=False,
    runtest='',
    migrations=False,
    report=False,
    coverage=False,
    cui=False,
):
    """Run unit-tests for InvenTree codebase.

    To run only certain test, use the argument --runtest.
    This can filter all the way down to:
    <module>.<file>.<class>.<method>

    Example:
    test --runtest=company.test_api
    will run tests in the company/test_api.py file.
    """
    # Run sanity check on the django install
    manage(c, 'check')

    use_pty = not disable_pty

    # Assemble the 'test' command argument-by-argument
    parts = ['test']

    # Either run the requested test subset, or every installed app
    parts.append(runtest if runtest else ' '.join(apps()))

    if report:
        parts.append('--slowreport')

    # Migration tests are opt-in; exclude them by default
    parts.append(
        '--tag migration_test' if migrations else '--exclude-tag migration_test'
    )

    if cui:
        parts.append('--exclude-tag=cui')

    cmd = ' '.join(parts)

    if coverage:
        # Run tests within coverage environment, and generate report
        c.run(f'coverage run {managePyPath()} {cmd}')
        c.run('coverage xml -i')
    else:
        # Run simple test runner, without coverage
        manage(c, cmd, pty=use_pty)
|
2022-06-11 00:07:57 +00:00
|
|
|
|
|
|
|
|
@task(help={'dev': 'Set up development environment at the end'})
def setup_test(c, ignore_update=False, dev=False, path='inventree-demo-dataset'):
    """Setup a testing environment.

    Args:
        c: Context variable
        ignore_update: Skip the 'update' and 'migrate' steps if True
        dev: Also set up a development environment at the end
        path: Directory into which the demo dataset is cloned
    """
    from src.backend.InvenTree.InvenTree.config import get_media_dir

    if not ignore_update:
        update(c)

    # Remove old data directory
    if os.path.exists(path):
        print('Removing old data ...')
        # Use shutil.rmtree instead of shelling out to 'rm' - portable
        # across platforms (the 'rm' binary is not available on Windows)
        shutil.rmtree(path)

    # Get test data
    print('Cloning demo dataset ...')
    c.run(f'git clone https://github.com/inventree/demo-dataset {path} -v --depth=1')
    print('========================================')

    # Make sure migrations are done - might have just deleted sqlite database
    if not ignore_update:
        migrate(c)

    # Load data
    print('Loading database records ...')
    import_records(c, filename=f'{path}/inventree_data.json', clear=True)

    # Copy media files
    print('Copying media files ...')
    src = Path(path).joinpath('media').resolve()
    dst = get_media_dir()

    print(f'Copying media files - "{src}" to "{dst}"')
    shutil.copytree(src, dst, dirs_exist_ok=True)

    print('Done setting up test environment...')
    print('========================================')

    # Set up development setup if flag is set
    if dev:
        setup_dev(c)
|
2022-07-21 01:41:42 +00:00
|
|
|
|
|
|
|
|
@task(
    help={
        'filename': "Output filename (default = 'schema.yml')",
        'overwrite': 'Overwrite existing files without asking first (default = off/False)',
    }
)
def schema(c, filename='schema.yml', overwrite=False, ignore_warnings=False):
    """Export current API schema.

    Args:
        c: Context variable
        filename: Output file for the generated schema
        overwrite: Overwrite an existing output file without asking
        ignore_warnings: Do not fail the export on schema warnings
    """
    check_file_existance(filename, overwrite)

    filename = os.path.abspath(filename)

    # BUGFIX: interpolate the actual target filename (the placeholder had
    # been lost, leaving the f-strings without any '{filename}' reference)
    print(f"Exporting schema file to '{filename}'")

    cmd = f'spectacular --file {filename} --validate --color'

    if not ignore_warnings:
        cmd += ' --fail-on-warn'

    manage(c, cmd, pty=True)

    # Sanity check - the export must have produced the requested file
    assert os.path.exists(filename)

    print('Schema export completed:', filename)
|
|
|
|
|
2023-05-24 06:34:36 +00:00
|
|
|
|
|
|
|
@task(default=True)
|
|
|
|
def version(c):
|
|
|
|
"""Show the current version of InvenTree."""
|
2024-04-03 01:16:59 +00:00
|
|
|
import src.backend.InvenTree.InvenTree.version as InvenTreeVersion
|
|
|
|
from src.backend.InvenTree.InvenTree.config import (
|
2024-01-11 00:28:58 +00:00
|
|
|
get_config_file,
|
|
|
|
get_media_dir,
|
|
|
|
get_static_dir,
|
|
|
|
)
|
2023-05-24 06:34:36 +00:00
|
|
|
|
2023-07-20 00:12:08 +00:00
|
|
|
# Gather frontend version information
|
|
|
|
_, node, yarn = node_available(versions=True)
|
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
|
|
|
f"""
|
2023-05-24 06:34:36 +00:00
|
|
|
InvenTree - inventree.org
|
|
|
|
The Open-Source Inventory Management System\n
|
|
|
|
|
|
|
|
Installation paths:
|
|
|
|
Base {localDir()}
|
|
|
|
Config {get_config_file()}
|
|
|
|
Media {get_media_dir()}
|
|
|
|
Static {get_static_dir()}
|
|
|
|
|
|
|
|
Versions:
|
|
|
|
Python {python_version()}
|
|
|
|
Django {InvenTreeVersion.inventreeDjangoVersion()}
|
|
|
|
InvenTree {InvenTreeVersion.inventreeVersion()}
|
|
|
|
API {InvenTreeVersion.inventreeApiVersion()}
|
2023-07-20 00:12:08 +00:00
|
|
|
Node {node if node else 'N/A'}
|
|
|
|
Yarn {yarn if yarn else 'N/A'}
|
2023-05-24 06:34:36 +00:00
|
|
|
|
|
|
|
Commit hash:{InvenTreeVersion.inventreeCommitHash()}
|
2024-01-11 00:28:58 +00:00
|
|
|
Commit date:{InvenTreeVersion.inventreeCommitDate()}"""
|
|
|
|
)
|
2023-05-24 06:34:36 +00:00
|
|
|
if len(sys.argv) == 1 and sys.argv[0].startswith('/opt/inventree/env/lib/python'):
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
|
|
|
"""
|
2023-05-24 06:34:36 +00:00
|
|
|
You are probably running the package installer / single-line installer. Please mentioned that in any bug reports!
|
|
|
|
|
|
|
|
Use '--list' for a list of available commands
|
2024-01-11 00:28:58 +00:00
|
|
|
Use '--help' for help on a specific command"""
|
|
|
|
)
|
2023-07-18 12:45:49 +00:00
|
|
|
|
|
|
|
|
@task()
def frontend_check(c):
    """Check if frontend is available."""
    # node_available() probes for node/yarn tooling; print its result
    print(node_available())
|
|
|
|
|
|
|
|
|
2023-07-18 12:45:49 +00:00
|
|
|
@task
|
|
|
|
def frontend_compile(c):
|
|
|
|
"""Generate react frontend.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
c: Context variable
|
|
|
|
"""
|
2024-02-01 01:25:24 +00:00
|
|
|
print('Compiling frontend code...')
|
|
|
|
|
2023-07-18 12:45:49 +00:00
|
|
|
frontend_install(c)
|
|
|
|
frontend_trans(c)
|
|
|
|
frontend_build(c)
|
|
|
|
|
|
|
|
|
|
|
|
@task
|
|
|
|
def frontend_install(c):
|
|
|
|
"""Install frontend requirements.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
c: Context variable
|
|
|
|
"""
|
2024-01-11 00:28:58 +00:00
|
|
|
print('Installing frontend dependencies')
|
|
|
|
yarn(c, 'yarn install')
|
2023-07-18 12:45:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
@task
|
|
|
|
def frontend_trans(c):
|
|
|
|
"""Compile frontend translations.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
c: Context variable
|
|
|
|
"""
|
2024-01-11 00:28:58 +00:00
|
|
|
print('Compiling frontend translations')
|
|
|
|
yarn(c, 'yarn run extract')
|
|
|
|
yarn(c, 'yarn run compile')
|
2023-07-18 12:45:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
@task
|
|
|
|
def frontend_build(c):
|
|
|
|
"""Build frontend.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
c: Context variable
|
|
|
|
"""
|
2024-01-11 00:28:58 +00:00
|
|
|
print('Building frontend')
|
|
|
|
yarn(c, 'yarn run build --emptyOutDir')
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
|
@task
def frontend_dev(c):
    """Start frontend development server.

    Args:
        c: Context variable
    """
    print('Starting frontend development server')
    # Compile translations first, then launch the hot-reloading dev server
    for sub_command in ('yarn run compile', 'yarn run dev'):
        yarn(c, sub_command)
|
|
|
|
|
|
|
|
|
|
|
|
@task(
|
|
|
|
help={
|
|
|
|
'ref': 'git ref, default: current git ref',
|
|
|
|
'tag': 'git tag to look for release',
|
|
|
|
'file': 'destination to frontend-build.zip file',
|
|
|
|
'repo': 'GitHub repository, default: InvenTree/inventree',
|
|
|
|
'extract': 'Also extract and place at the correct destination, default: True',
|
|
|
|
'clean': 'Delete old files from InvenTree/web/static/web first, default: True',
|
|
|
|
}
|
|
|
|
)
|
|
|
|
def frontend_download(
|
|
|
|
c,
|
|
|
|
ref=None,
|
|
|
|
tag=None,
|
|
|
|
file=None,
|
|
|
|
repo='InvenTree/inventree',
|
|
|
|
extract=True,
|
|
|
|
clean=True,
|
|
|
|
):
|
2023-07-20 00:12:08 +00:00
|
|
|
"""Download a pre-build frontend from GitHub if you dont want to install nodejs on your machine.
|
|
|
|
|
|
|
|
There are 3 possibilities to install the frontend:
|
|
|
|
1. invoke frontend-download --ref 01f2aa5f746a36706e9a5e588c4242b7bf1996d5
|
|
|
|
if ref is omitted, it tries to auto detect the current git ref via `git rev-parse HEAD`.
|
|
|
|
Note: GitHub doesn't allow workflow artifacts to be downloaded from anonymous users, so
|
|
|
|
this will output a link where you can download the frontend with a signed in browser
|
|
|
|
and then continue with option 3
|
|
|
|
2. invoke frontend-download --tag 0.13.0
|
|
|
|
Downloads the frontend build from the releases.
|
|
|
|
3. invoke frontend-download --file /home/vscode/Downloads/frontend-build.zip
|
|
|
|
This will extract your zip file and place the contents at the correct destination
|
|
|
|
"""
|
|
|
|
import functools
|
|
|
|
import subprocess
|
|
|
|
from tempfile import NamedTemporaryFile
|
|
|
|
from zipfile import ZipFile
|
|
|
|
|
|
|
|
import requests
|
|
|
|
|
2024-02-01 01:25:24 +00:00
|
|
|
print('Downloading frontend...')
|
|
|
|
|
2023-07-20 00:12:08 +00:00
|
|
|
# globals
|
2024-01-11 00:28:58 +00:00
|
|
|
default_headers = {'Accept': 'application/vnd.github.v3+json'}
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
# helper functions
|
|
|
|
def find_resource(resource, key, value):
|
|
|
|
for obj in resource:
|
|
|
|
if obj[key] == value:
|
|
|
|
return obj
|
|
|
|
return None
|
|
|
|
|
|
|
|
def handle_extract(file):
|
|
|
|
# if no extract is requested, exit here
|
|
|
|
if not extract:
|
|
|
|
return
|
|
|
|
|
2024-04-03 01:16:59 +00:00
|
|
|
dest_path = Path(__file__).parent / 'src/backend' / 'InvenTree/web/static/web'
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
# if clean, delete static/web directory
|
|
|
|
if clean:
|
|
|
|
shutil.rmtree(dest_path, ignore_errors=True)
|
|
|
|
os.makedirs(dest_path)
|
2024-01-11 00:28:58 +00:00
|
|
|
print(f'Cleaned directory: {dest_path}')
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
# unzip build to static folder
|
2024-01-11 00:28:58 +00:00
|
|
|
with ZipFile(file, 'r') as zip_ref:
|
2023-07-20 00:12:08 +00:00
|
|
|
zip_ref.extractall(dest_path)
|
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
print(f'Unzipped downloaded frontend build to: {dest_path}')
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
def handle_download(url):
|
|
|
|
# download frontend-build.zip to temporary file
|
2024-01-11 00:28:58 +00:00
|
|
|
with requests.get(
|
|
|
|
url, headers=default_headers, stream=True, allow_redirects=True
|
|
|
|
) as response, NamedTemporaryFile(suffix='.zip') as dst:
|
2023-07-20 00:12:08 +00:00
|
|
|
response.raise_for_status()
|
|
|
|
|
|
|
|
# auto decode the gzipped raw data
|
2024-01-11 00:28:58 +00:00
|
|
|
response.raw.read = functools.partial(
|
|
|
|
response.raw.read, decode_content=True
|
|
|
|
)
|
|
|
|
with open(dst.name, 'wb') as f:
|
2023-07-20 00:12:08 +00:00
|
|
|
shutil.copyfileobj(response.raw, f)
|
2024-01-11 00:28:58 +00:00
|
|
|
print(f'Downloaded frontend build to temporary file: {dst.name}')
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
handle_extract(dst.name)
|
|
|
|
|
|
|
|
# if zip file is specified, try to extract it directly
|
|
|
|
if file:
|
|
|
|
handle_extract(file)
|
|
|
|
return
|
|
|
|
|
|
|
|
# check arguments
|
|
|
|
if ref is not None and tag is not None:
|
2024-01-11 00:28:58 +00:00
|
|
|
print('[ERROR] Do not set ref and tag.')
|
2023-07-20 00:12:08 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
if ref is None and tag is None:
|
|
|
|
try:
|
2024-01-11 00:28:58 +00:00
|
|
|
ref = subprocess.check_output(
|
|
|
|
['git', 'rev-parse', 'HEAD'], encoding='utf-8'
|
|
|
|
).strip()
|
2023-07-20 00:12:08 +00:00
|
|
|
except Exception:
|
|
|
|
print("[ERROR] Cannot get current ref via 'git rev-parse HEAD'")
|
|
|
|
return
|
|
|
|
|
|
|
|
if ref is None and tag is None:
|
2024-01-11 00:28:58 +00:00
|
|
|
print('[ERROR] Either ref or tag needs to be set.')
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
if tag:
|
2024-01-11 00:28:58 +00:00
|
|
|
tag = tag.lstrip('v')
|
2023-07-20 00:12:08 +00:00
|
|
|
try:
|
2024-01-11 00:28:58 +00:00
|
|
|
handle_download(
|
|
|
|
f'https://github.com/{repo}/releases/download/{tag}/frontend-build.zip'
|
|
|
|
)
|
2023-07-20 00:12:08 +00:00
|
|
|
except Exception as e:
|
|
|
|
if not isinstance(e, requests.HTTPError):
|
|
|
|
raise e
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
|
|
|
f"""[ERROR] An Error occurred. Unable to download frontend build, release or build does not exist,
|
2023-07-20 00:12:08 +00:00
|
|
|
try downloading the frontend-build.zip yourself via: https://github.com/{repo}/releases
|
2024-01-11 00:28:58 +00:00
|
|
|
Then try continuing by running: invoke frontend-download --file <path-to-downloaded-zip-file>"""
|
|
|
|
)
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
|
|
|
|
if ref:
|
|
|
|
# get workflow run from all workflow runs on that particular ref
|
2024-01-11 00:28:58 +00:00
|
|
|
workflow_runs = requests.get(
|
|
|
|
f'https://api.github.com/repos/{repo}/actions/runs?head_sha={ref}',
|
|
|
|
headers=default_headers,
|
|
|
|
).json()
|
2023-07-20 00:12:08 +00:00
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
if not (qc_run := find_resource(workflow_runs['workflow_runs'], 'name', 'QC')):
|
|
|
|
print('[ERROR] Cannot find any workflow runs for current sha')
|
2023-07-20 00:12:08 +00:00
|
|
|
return
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
|
|
|
f"Found workflow {qc_run['name']} (run {qc_run['run_number']}-{qc_run['run_attempt']})"
|
|
|
|
)
|
2023-07-20 00:12:08 +00:00
|
|
|
|
|
|
|
# get frontend-build artifact from all artifacts available for this workflow run
|
2024-01-11 00:28:58 +00:00
|
|
|
artifacts = requests.get(
|
|
|
|
qc_run['artifacts_url'], headers=default_headers
|
|
|
|
).json()
|
|
|
|
if not (
|
|
|
|
frontend_artifact := find_resource(
|
|
|
|
artifacts['artifacts'], 'name', 'frontend-build'
|
|
|
|
)
|
|
|
|
):
|
|
|
|
print('[ERROR] Cannot find frontend-build.zip attachment for current sha')
|
2023-07-20 00:12:08 +00:00
|
|
|
return
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
2024-02-19 23:47:57 +00:00
|
|
|
f"Found artifact {frontend_artifact['name']} with id {frontend_artifact['id']} ({frontend_artifact['size_in_bytes'] / 1e6:.2f}MB)."
|
2024-01-11 00:28:58 +00:00
|
|
|
)
|
2023-07-20 00:12:08 +00:00
|
|
|
|
2024-01-11 00:28:58 +00:00
|
|
|
print(
|
|
|
|
f"""
|
2023-07-20 00:12:08 +00:00
|
|
|
GitHub doesn't allow artifact downloads from anonymous users. Either download the following file
|
|
|
|
via your signed in browser, or consider using a point release download via invoke frontend-download --tag <git-tag>
|
|
|
|
|
|
|
|
Download: https://github.com/{repo}/suites/{qc_run['check_suite_id']}/artifacts/{frontend_artifact['id']} manually and
|
2024-01-11 00:28:58 +00:00
|
|
|
continue by running: invoke frontend-download --file <path-to-downloaded-zip-file>"""
|
|
|
|
)
|
2024-05-22 00:17:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
@task(
|
|
|
|
help={
|
|
|
|
'address': 'Host and port to run the server on (default: localhost:8080)',
|
|
|
|
'compile_schema': 'Compile the schema documentation first (default: False)',
|
|
|
|
}
|
|
|
|
)
|
|
|
|
def docs_server(c, address='localhost:8080', compile_schema=False):
|
|
|
|
"""Start a local mkdocs server to view the documentation."""
|
|
|
|
if compile_schema:
|
|
|
|
# Build the schema docs first
|
|
|
|
schema(c, ignore_warnings=True, overwrite=True, filename='docs/schema.yml')
|
|
|
|
c.run('python docs/extract_schema.py docs/schema.yml')
|
|
|
|
|
|
|
|
c.run(f'mkdocs serve -a {address} -f docs/mkdocs.yml')
|