Add support for using Redis as a cache and a broker

Nigel committed 2021-10-14 17:03:37 -06:00
parent 96a89558b4, commit e8c2d4da10
GPG Key ID: 3AB9572E33E501E6 (no known key found for this signature in database)
2 changed files with 78 additions and 28 deletions

View File

@@ -15,6 +15,7 @@ import logging
import os
import random
import socket
import string
import shutil
import sys
@@ -361,30 +362,6 @@ REST_FRAMEWORK = {
WSGI_APPLICATION = 'InvenTree.wsgi.application'
background_workers = os.environ.get('INVENTREE_BACKGROUND_WORKERS', None)

if background_workers is not None:
    try:
        background_workers = int(background_workers)
    except ValueError:
        background_workers = None

if background_workers is None:
    # Sensible default?
    background_workers = 4

# django-q configuration
Q_CLUSTER = {
    'name': 'InvenTree',
    'workers': background_workers,
    'timeout': 90,
    'retry': 120,
    'queue_limit': 50,
    'bulk': 10,
    'orm': 'default',
    'sync': False,
}
"""
Configure the database backend based on the user-specified values.
@@ -562,12 +539,82 @@ DATABASES = {
}
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}
_cache_config = CONFIG.get("cache", {})
_cache_host = _cache_config.get("host", os.getenv("INVENTREE_CACHE_HOST"))
_cache_port = _cache_config.get("port", os.getenv("INVENTREE_CACHE_PORT"))

if _cache_host:
    # We may be relying on a non-localhost Redis server for our cache,
    # so don't wait too long for it, as nothing in the cache should be
    # irreplaceable. Django Q Cluster will just try again later.
    _cache_options = {
        "CLIENT_CLASS": "django_redis.client.DefaultClient",
        "SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")),
        "SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")),
        "CONNECTION_POOL_KWARGS": {
            "socket_keepalive": _is_true(
                os.getenv("CACHE_TCP_KEEPALIVE", "1")
            ),
            "socket_keepalive_options": {
                socket.TCP_KEEPCNT: int(
                    os.getenv("CACHE_KEEPALIVES_COUNT", "5")
                ),
                socket.TCP_KEEPIDLE: int(
                    os.getenv("CACHE_KEEPALIVES_IDLE", "1")
                ),
                socket.TCP_KEEPINTVL: int(
                    os.getenv("CACHE_KEEPALIVES_INTERVAL", "1")
                ),
                socket.TCP_USER_TIMEOUT: int(
                    os.getenv("CACHE_TCP_USER_TIMEOUT", "1000")
                ),
            },
        },
    }

    CACHES = {
        # Connection configuration for Django Q Cluster
        "worker": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": f"redis://{_cache_host}:{_cache_port}/0",
            "OPTIONS": _cache_options,
        },
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": f"redis://{_cache_host}:{_cache_port}/1",
            "OPTIONS": _cache_options,
        },
    }
else:
    CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
        },
    }
try:
    # 4 background workers seems like a sensible default
    background_workers = int(os.environ.get('INVENTREE_BACKGROUND_WORKERS', 4))
except ValueError:
    background_workers = 4

# django-q configuration
Q_CLUSTER = {
    'name': 'InvenTree',
    'workers': background_workers,
    'timeout': 90,
    'retry': 120,
    'queue_limit': 50,
    'bulk': 10,
    'orm': 'default',
    'sync': False,
}

if _cache_host:
    # If an external Redis cache is in use, make it the broker for Django Q as well
    Q_CLUSTER["django_redis"] = "worker"
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
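
As a rough illustration of what the settings above enable (not part of the commit): the sketch below exercises both cache aliases and queues a task through the Django Q broker from a Django shell. It assumes the Redis-backed branch of the configuration is active and that a `manage.py qcluster` worker process is running; the `math.copysign` task is simply the stock django-q example, not an InvenTree task.

# Sketch only: run inside `python manage.py shell` with a reachable Redis server.
from django.core.cache import caches
from django_q.tasks import async_task, result

# "default" is the general-purpose cache (Redis database 1 when Redis is configured)
caches["default"].set("greeting", "hello", timeout=30)
print(caches["default"].get("greeting"))  # -> "hello"

# "worker" is the connection Django Q uses as its broker
# because Q_CLUSTER["django_redis"] = "worker" is set above
caches["worker"].set("probe", 1, timeout=5)

# Queue a background task; a running qcluster process picks it up
task_id = async_task("math.copysign", 2, -2.0)
print(result(task_id, wait=500))  # -> -2.0 once a worker has processed it

With `Q_CLUSTER["django_redis"] = "worker"`, Django Q keeps its queue in the same Redis instance on database 0, while ordinary Django caching uses database 1, so the two workloads do not share a keyspace.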

View File

@@ -11,3 +11,6 @@ psycopg2>=2.9.1
mysqlclient>=2.0.3
pgcli>=3.1.0
mariadb>=1.0.7
# Cache
django-redis>=5.0.0
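
Since django-redis is now pulled in as a dependency, a quick way to confirm that the configured backend can actually reach the Redis server is to borrow its underlying redis-py client. A minimal sketch, assuming the Redis-backed cache settings above are in effect:

# Sketch only: connectivity check for the django-redis backend.
from django_redis import get_redis_connection

conn = get_redis_connection("default")  # raw redis-py client behind the "default" alias
print(conn.ping())                      # -> True if the Redis server is reachable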