fix(ui): do not re-process if processor config hasn't changed

Author: psychedelicious, 2024-05-02 08:27:16 +10:00 (committed by Kent Keirsey)
Parent: 47ee08db91
Commit: ca1c3c0873
2 changed files with 22 additions and 18 deletions

File 1 of 2:

@@ -14,6 +14,7 @@ import { CONTROLNET_PROCESSORS } from 'features/controlLayers/util/controlAdapte
 import { isImageOutput } from 'features/nodes/types/common';
 import { addToast } from 'features/system/store/systemSlice';
 import { t } from 'i18next';
+import { isEqual } from 'lodash-es';
 import { imagesApi } from 'services/api/endpoints/images';
 import { queueApi } from 'services/api/endpoints/queue';
 import type { BatchConfig, ImageDTO } from 'services/api/types';
@@ -27,8 +28,11 @@ const log = logger('session');
 export const addControlAdapterPreprocessor = (startAppListening: AppStartListening) => {
   startAppListening({
     matcher,
-    effect: async (action, { dispatch, getState, cancelActiveListeners, delay, take }) => {
+    effect: async (action, { dispatch, getState, getOriginalState, cancelActiveListeners, delay, take }) => {
       const { layerId } = action.payload;
+      const precheckLayerOriginal = getOriginalState()
+        .controlLayers.present.layers.filter(isControlAdapterLayer)
+        .find((l) => l.id === layerId);
       const precheckLayer = getState()
         .controlLayers.present.layers.filter(isControlAdapterLayer)
         .find((l) => l.id === layerId);
@@ -42,7 +46,9 @@ export const addControlAdapterPreprocessor = (startAppListening: AppStartListeni
         // Layer doesn't have a processor config
         !precheckLayer.controlAdapter.processorConfig ||
         // Layer is already processing an image
-        precheckLayer.controlAdapter.isProcessingImage
+        precheckLayer.controlAdapter.isProcessingImage ||
+        // Processor config is the same
+        isEqual(precheckLayerOriginal?.controlAdapter.processorConfig, precheckLayer.controlAdapter.processorConfig)
       ) {
         return;
       }
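
The guard above relies on Redux Toolkit's listener middleware: getOriginalState() returns the store state as it was before the matched action was reduced, while getState() returns the state afterwards, so a lodash isEqual over the two processor configs tells the listener whether the action actually changed anything. A minimal, self-contained sketch of the same pattern (the state shape, selectLayer helper, and layerId handling are illustrative stand-ins, not the project's real code):

// Sketch only: skip work when the action did not actually change a layer's processor config.
import { isEqual } from 'lodash-es';
import type { TypedStartListening } from '@reduxjs/toolkit';

type ProcessorConfig = { type: string } & Record<string, unknown>;
type Layer = { id: string; processorConfig: ProcessorConfig | null };
type RootState = { layers: Layer[] };

// Hypothetical selector; the real listener walks controlLayers.present.layers instead.
const selectLayer = (state: RootState, layerId: string) => state.layers.find((l) => l.id === layerId);

export const addProcessorGuardExample = (startAppListening: TypedStartListening<RootState>) => {
  startAppListening({
    predicate: () => true, // stand-in for the real `matcher`
    effect: async (_action, { getState, getOriginalState }) => {
      const layerId = 'layer-1'; // the real listener reads this from action.payload
      const before = selectLayer(getOriginalState(), layerId);
      const after = selectLayer(getState(), layerId);
      // getOriginalState() is the state before the matched action was reduced, so a deep
      // equality check reveals whether this action actually changed the processor config.
      if (isEqual(before?.processorConfig, after?.processorConfig)) {
        return; // config unchanged: do not queue another preprocessing run
      }
      // ...enqueue the processor graph here...
    },
  });
};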

File 2 of 2:

@@ -26,7 +26,6 @@ import type {
   T2IAdapterModelConfig,
   ZoeDepthImageProcessorInvocation,
 } from 'services/api/types';
-import { v4 as uuidv4 } from 'uuid';
 import { z } from 'zod';
 
 const zDepthAnythingModelSize = z.enum(['large', 'base', 'small']);
@@ -164,7 +163,6 @@ type ProcessorData<T extends ProcessorType> = {
 };
 
 const minDim = (image: ImageWithDims): number => Math.min(image.width, image.height);
-const getId = (type: ProcessorType): string => `${type}_${uuidv4()}`;
 
 type CAProcessorsData = {
   [key in ProcessorType]: ProcessorData<key>;
@@ -184,7 +182,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.canny',
     descriptionTKey: 'controlnet.cannyDescription',
     buildDefaults: () => ({
-      id: getId('canny_image_processor'),
+      id: 'canny_image_processor',
       type: 'canny_image_processor',
       low_threshold: 100,
       high_threshold: 200,
@@ -202,7 +200,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.colorMap',
     descriptionTKey: 'controlnet.colorMapDescription',
     buildDefaults: () => ({
-      id: getId('color_map_image_processor'),
+      id: 'color_map_image_processor',
       type: 'color_map_image_processor',
       color_map_tile_size: 64,
     }),
@@ -217,7 +215,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.contentShuffle',
     descriptionTKey: 'controlnet.contentShuffleDescription',
     buildDefaults: (baseModel) => ({
-      id: getId('content_shuffle_image_processor'),
+      id: 'content_shuffle_image_processor',
       type: 'content_shuffle_image_processor',
       h: baseModel === 'sdxl' ? 1024 : 512,
       w: baseModel === 'sdxl' ? 1024 : 512,
@@ -235,7 +233,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.depthAnything',
     descriptionTKey: 'controlnet.depthAnythingDescription',
     buildDefaults: () => ({
-      id: getId('depth_anything_image_processor'),
+      id: 'depth_anything_image_processor',
       type: 'depth_anything_image_processor',
       model_size: 'small',
     }),
@@ -250,7 +248,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.hed',
     descriptionTKey: 'controlnet.hedDescription',
     buildDefaults: () => ({
-      id: getId('hed_image_processor'),
+      id: 'hed_image_processor',
       type: 'hed_image_processor',
       scribble: false,
     }),
@@ -266,7 +264,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.lineartAnime',
     descriptionTKey: 'controlnet.lineartAnimeDescription',
     buildDefaults: () => ({
-      id: getId('lineart_anime_image_processor'),
+      id: 'lineart_anime_image_processor',
       type: 'lineart_anime_image_processor',
     }),
     buildNode: (image, config) => ({
@@ -281,7 +279,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.lineart',
     descriptionTKey: 'controlnet.lineartDescription',
     buildDefaults: () => ({
-      id: getId('lineart_image_processor'),
+      id: 'lineart_image_processor',
       type: 'lineart_image_processor',
       coarse: false,
     }),
@@ -297,7 +295,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.mediapipeFace',
     descriptionTKey: 'controlnet.mediapipeFaceDescription',
     buildDefaults: () => ({
-      id: getId('mediapipe_face_processor'),
+      id: 'mediapipe_face_processor',
       type: 'mediapipe_face_processor',
       max_faces: 1,
       min_confidence: 0.5,
@@ -314,7 +312,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.depthMidas',
     descriptionTKey: 'controlnet.depthMidasDescription',
     buildDefaults: () => ({
-      id: getId('midas_depth_image_processor'),
+      id: 'midas_depth_image_processor',
       type: 'midas_depth_image_processor',
       a_mult: 2,
       bg_th: 0.1,
@@ -331,7 +329,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.mlsd',
     descriptionTKey: 'controlnet.mlsdDescription',
     buildDefaults: () => ({
-      id: getId('mlsd_image_processor'),
+      id: 'mlsd_image_processor',
       type: 'mlsd_image_processor',
       thr_d: 0.1,
       thr_v: 0.1,
@@ -348,7 +346,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.normalBae',
     descriptionTKey: 'controlnet.normalBaeDescription',
     buildDefaults: () => ({
-      id: getId('normalbae_image_processor'),
+      id: 'normalbae_image_processor',
       type: 'normalbae_image_processor',
     }),
     buildNode: (image, config) => ({
@@ -363,7 +361,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.dwOpenpose',
     descriptionTKey: 'controlnet.dwOpenposeDescription',
     buildDefaults: () => ({
-      id: getId('dw_openpose_image_processor'),
+      id: 'dw_openpose_image_processor',
       type: 'dw_openpose_image_processor',
       draw_body: true,
       draw_face: false,
@@ -380,7 +378,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.pidi',
     descriptionTKey: 'controlnet.pidiDescription',
     buildDefaults: () => ({
-      id: getId('pidi_image_processor'),
+      id: 'pidi_image_processor',
       type: 'pidi_image_processor',
       scribble: false,
       safe: false,
@@ -397,7 +395,7 @@ export const CONTROLNET_PROCESSORS: CAProcessorsData = {
     labelTKey: 'controlnet.depthZoe',
     descriptionTKey: 'controlnet.depthZoeDescription',
     buildDefaults: () => ({
-      id: getId('zoe_depth_image_processor'),
+      id: 'zoe_depth_image_processor',
       type: 'zoe_depth_image_processor',
     }),
     buildNode: (image, config) => ({
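
The id change in this file presumably supports the new guard: while buildDefaults stamped each config with a uuid suffix, two configs built from the same defaults were never deeply equal, so a comparison like the one added in the listener could not report "no change" after the defaults were rebuilt. With static ids, identical defaults compare equal. A small illustration (not part of the commit), reusing the canny defaults shown in the diff above:

// Illustration only: deep equality vs. randomly-suffixed ids.
import { isEqual } from 'lodash-es';
import { v4 as uuidv4 } from 'uuid';

const buildWithRandomId = () => ({
  id: `canny_image_processor_${uuidv4()}`,
  type: 'canny_image_processor',
  low_threshold: 100,
  high_threshold: 200,
});

const buildWithStaticId = () => ({
  id: 'canny_image_processor',
  type: 'canny_image_processor',
  low_threshold: 100,
  high_threshold: 200,
});

console.log(isEqual(buildWithRandomId(), buildWithRandomId())); // false: the id differs on every call
console.log(isEqual(buildWithStaticId(), buildWithStaticId())); // true: identical defaults now compare equal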