Import data fix (#6253)

* Use '--natural-foreign' when exporting dataset

- Uses "natural keys" (model names) instead of ContentType IDs (see the sketch after this commit message)

* Update task options

- Change 'include_plugins' to 'exclude_plugins'
- Change 'delete_temp' to 'retain_temp'

* Split data import into two-step process

- First, import auth models
- Second, import data
- Ensures auth.user and auth.group are in place before users.owner is loaded

* Adjust temp file name

* Touch apps.py

- Just so the proper CI checks run
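
For context on the "natural keys" change (a sketch, not part of this commit): Django's dumpdata --natural-foreign flag serializes a foreign key to any model that defines a natural key, such as ContentType, using that natural key instead of the integer primary key; for ContentType the natural key is the (app_label, model) pair. A minimal Python sketch of the difference, with hypothetical pk values and field names (users.owner is named in the commit message, but its real fixture layout is not shown here):

# Without --natural-foreign: the ContentType reference is a bare primary key.
# It only resolves if the target database happens to assign the same
# ContentType IDs, which is not guaranteed across installations.
entry_pk_based = {
    'model': 'users.owner',
    'pk': 1,
    'fields': {'owner_type': 37, 'owner_id': 2},
}

# With --natural-foreign: the reference is exported as the natural key
# (app_label, model), so it resolves regardless of ContentType IDs.
entry_natural = {
    'model': 'users.owner',
    'pk': 1,
    'fields': {'owner_type': ['auth', 'group'], 'owner_id': 2},
}
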
Oliver 2024-01-16 21:32:51 +11:00 committed by GitHub
parent 829e01dd33
commit fa28697799
2 changed files with 70 additions and 18 deletions

apps.py

@@ -1,4 +1,4 @@
-"""AppConfig for inventree app."""
+"""AppConfig for InvenTree app."""
 
 import logging
 from importlib import import_module

tasks.py

@@ -59,7 +59,10 @@ def apps():
 def content_excludes(
-    allow_tokens: bool = True, allow_plugins: bool = True, allow_sso: bool = True
+    allow_auth: bool = True,
+    allow_tokens: bool = True,
+    allow_plugins: bool = True,
+    allow_sso: bool = True,
 ):
     """Returns a list of content types to exclude from import/export.
@@ -83,6 +86,11 @@ def content_excludes(
         'user_sessions.session',
     ]
 
+    # Optionally exclude user auth data
+    if not allow_auth:
+        excludes.append('auth.group')
+        excludes.append('auth.user')
+
     # Optionally exclude user token information
     if not allow_tokens:
         excludes.append('users.apitoken')
@@ -421,9 +429,9 @@ def update(c, skip_backup=False, frontend: bool = False, no_frontend: bool = Fal
         'overwrite': 'Overwrite existing files without asking first (default = False)',
         'include_permissions': 'Include user and group permissions in the output file (default = False)',
         'include_tokens': 'Include API tokens in the output file (default = False)',
-        'include_plugins': 'Include plugin data in the output file (default = False)',
+        'exclude_plugins': 'Exclude plugin data from the output file (default = False)',
         'include_sso': 'Include SSO token data in the output file (default = False)',
-        'delete_temp': 'Delete temporary files (containing permissions) at end of run. Note that this will delete temporary files from previous runs as well. (default = off/False)',
+        'retain_temp': 'Retain temporary files (containing permissions) at end of process (default = False)',
     }
 )
 def export_records(
@@ -432,9 +440,9 @@ def export_records(
     overwrite=False,
     include_permissions=False,
     include_tokens=False,
-    include_plugins=False,
+    exclude_plugins=False,
     include_sso=False,
-    delete_temp=False,
+    retain_temp=False,
 ):
     """Export all database records to a file.
@@ -465,11 +473,11 @@ def export_records(
     excludes = content_excludes(
         allow_tokens=include_tokens,
-        allow_plugins=include_plugins,
+        allow_plugins=not exclude_plugins,
         allow_sso=include_sso,
     )
 
-    cmd = f"dumpdata --indent 2 --output '{tmpfile}' {excludes}"
+    cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile}' {excludes}"
 
     # Dump data to temporary file
     manage(c, cmd, pty=True)
@@ -497,16 +505,22 @@ def export_records(
     print('Data export completed')
 
-    if delete_temp is True:
-        print('Removing temporary file')
+    if not retain_temp:
+        print('Removing temporary files')
         os.remove(tmpfile)
 
 
 @task(
-    help={'filename': 'Input filename', 'clear': 'Clear existing data before import'},
+    help={
+        'filename': 'Input filename',
+        'clear': 'Clear existing data before import',
+        'retain_temp': 'Retain temporary files at end of process (default = False)',
+    },
     post=[rebuild_models, rebuild_thumbnails],
 )
-def import_records(c, filename='data.json', clear=False):
+def import_records(
+    c, filename='data.json', clear: bool = False, retain_temp: bool = False
+):
     """Import database records from a file."""
 
     # Get an absolute path to the supplied filename
     if not os.path.isabs(filename):
@@ -521,11 +535,22 @@ def import_records(c, filename='data.json', clear=False):
     print(f"Importing database records from '{filename}'")
 
-    # Pre-process the data, to remove any "permissions" specified for a user or group
-    tmpfile = f'{filename}.tmp.json'
+    # We need to load 'auth' data (users / groups) *first*
+    # This is due to the users.owner model, which has a ContentType foreign key
+    authfile = f'{filename}.auth.json'
+    datafile = f'{filename}.data.json'
 
     with open(filename, 'r') as f_in:
-        data = json.loads(f_in.read())
+        try:
+            data = json.loads(f_in.read())
+        except json.JSONDecodeError as exc:
+            print(f'Error: Failed to decode JSON file: {exc}')
+            sys.exit(1)
+
+    auth_data = []
+    load_data = []
 
     for entry in data:
         if 'model' in entry:
@@ -537,14 +562,41 @@
             if entry['model'] == 'auth.user':
                 entry['fields']['user_permissions'] = []
 
-    # Write the processed data to the tmp file
-    with open(tmpfile, 'w') as f_out:
-        f_out.write(json.dumps(data, indent=2))
+            # Save auth data for later
+            if entry['model'].startswith('auth.'):
+                auth_data.append(entry)
+            else:
+                load_data.append(entry)
+        else:
+            print('Warning: Invalid entry in data file')
+            print(entry)
 
-    cmd = f"loaddata '{tmpfile}' -i {content_excludes()}"
+    # Write the auth file data
+    with open(authfile, 'w') as f_out:
+        f_out.write(json.dumps(auth_data, indent=2))
+
+    # Write the processed data to the tmp file
+    with open(datafile, 'w') as f_out:
+        f_out.write(json.dumps(load_data, indent=2))
+
+    excludes = content_excludes(allow_auth=False)
+
+    # Import auth models first
+    print('Importing user auth data...')
+    cmd = f"loaddata '{authfile}'"
+    manage(c, cmd, pty=True)
 
+    # Import everything else next
+    print('Importing database records...')
+    cmd = f"loaddata '{datafile}' -i {excludes}"
     manage(c, cmd, pty=True)
 
+    if not retain_temp:
+        print('Removing temporary files')
+        os.remove(datafile)
+        os.remove(authfile)
+
     print('Data import completed')
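
Distilling the new two-step import above (a sketch restating the task logic with illustrative fixture entries, not a separate API; the file names mirror the authfile/datafile pattern in the diff):

import json

# Illustrative fixture entries; a real export contains many more models and fields
data = [
    {'model': 'auth.group', 'pk': 1, 'fields': {'name': 'engineers'}},
    {'model': 'auth.user', 'pk': 2, 'fields': {'username': 'alice'}},
    {'model': 'users.owner', 'pk': 1, 'fields': {}},  # refers to auth models
]

# Partition auth models from everything else
auth_data = [e for e in data if e['model'].startswith('auth.')]
load_data = [e for e in data if not e['model'].startswith('auth.')]

# Write two fixture files; the auth file is loaded first so that records
# referencing users/groups can resolve those references in the second pass
with open('data.json.auth.json', 'w') as f_out:
    f_out.write(json.dumps(auth_data, indent=2))

with open('data.json.data.json', 'w') as f_out:
    f_out.write(json.dumps(load_data, indent=2))

The task then runs loaddata on the auth file first and on the data file second, as shown in the hunk above.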