Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)

refactor(ui): fix more types

parent 37d2607f34
commit e8891a1988
@@ -10,7 +10,9 @@ export const ReduxInit = memo((props: PropsWithChildren) => {
   const dispatch = useAppDispatch();
   useGlobalModifiersInit();
   useEffect(() => {
-    dispatch(modelChanged({ key: 'test_model', hash: 'some_hash', name: 'some name', base: 'sd-1', type: 'main' }));
+    dispatch(
+      modelChanged({ model: { key: 'test_model', hash: 'some_hash', name: 'some name', base: 'sd-1', type: 'main' } })
+    );
   }, []);

   return props.children;
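Note: the payload for `modelChanged` now wraps the model instead of spreading its fields, matching the reducer signature updated later in this commit. A minimal sketch of the two shapes, with `ParameterModel` simplified to the fields used in the test dispatch above:

```ts
// Simplified sketch; the real ParameterModel type is more detailed.
type ParameterModel = { key: string; hash: string; name: string; base: string; type: string };

// Before: the model's fields were the payload itself.
type ModelChangedPayloadOld = ParameterModel;

// After: the model is nested and nullable, with an optional previous model alongside it.
type ModelChangedPayloadNew = { model: ParameterModel | null; previousModel?: ParameterModel | null };

const payload: ModelChangedPayloadNew = {
  model: { key: 'test_model', hash: 'some_hash', name: 'some name', base: 'sd-1', type: 'main' },
};
```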
@@ -1,5 +1,5 @@
 import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
-import { setInfillMethod } from 'features/canvas/store/canvasSlice';
+import { setInfillMethod } from 'features/controlLayers/store/canvasV2Slice';
 import { shouldUseNSFWCheckerChanged, shouldUseWatermarkerChanged } from 'features/system/store/systemSlice';
 import { appInfoApi } from 'services/api/endpoints/appInfo';

@@ -8,7 +8,7 @@ export const addAppConfigReceivedListener = (startAppListening: AppStartListenin
     matcher: appInfoApi.endpoints.getAppConfig.matchFulfilled,
     effect: async (action, { getState, dispatch }) => {
       const { infill_methods = [], nsfw_methods = [], watermarking_methods = [] } = action.payload;
-      const infillMethod = getState().generation.infillMethod;
+      const infillMethod = getState().canvasV2.compositing.infillMethod;

       if (!infill_methods.includes(infillMethod)) {
         // if there is no infill method, set it to the first one
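Note: with the infill method now read from `canvasV2.compositing`, the listener still needs a fallback when the stored method is not in the server's `infill_methods` list. The hunk ends at the comment, so the concrete fallback is assumed; this standalone sketch only illustrates the selection rule:

```ts
// Standalone sketch of the fallback rule, not the actual listener code:
// keep the stored method if the server still offers it, otherwise take the first offered one.
const resolveInfillMethod = (stored: string, available: string[]): string | undefined =>
  available.includes(stored) ? stored : available[0];

// resolveInfillMethod('patchmatch', ['tile', 'lama']) === 'tile'
```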
@@ -2,7 +2,7 @@ import { logger } from 'app/logging/logger';
 import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
 import { canvasImageToControlAdapter } from 'features/canvas/store/actions';
 import { getBaseLayerBlob } from 'features/canvas/util/getBaseLayerBlob';
-import { controlAdapterImageChanged } from 'features/controlAdapters/store/controlAdaptersSlice';
+import { caImageChanged } from 'features/controlLayers/store/canvasV2Slice';
 import { toast } from 'features/toast/toast';
 import { t } from 'i18next';
 import { imagesApi } from 'services/api/endpoints/images';

@@ -47,14 +47,7 @@ export const addCanvasImageToControlNetListener = (startAppListening: AppStartLi
         })
       ).unwrap();

-      const { image_name } = imageDTO;
-
-      dispatch(
-        controlAdapterImageChanged({
-          id,
-          controlImage: image_name,
-        })
-      );
+      dispatch(caImageChanged({ id, imageDTO }));
     },
   });
 };
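Note: the listener now dispatches a single `caImageChanged` action carrying the whole `imageDTO`, rather than extracting `image_name` for the old `controlAdapterImageChanged` action. A sketch of the two payload shapes, with `ImageDTO` reduced to the one field the removed code read:

```ts
// ImageDTO trimmed for illustration; the real DTO has many more fields.
type ImageDTO = { image_name: string };

// Before: only the image name travelled in the payload.
type ControlAdapterImageChangedPayload = { id: string; controlImage: string };

// After: the full DTO travels in the payload, so consumers keep all of its metadata.
type CaImageChangedPayload = { id: string; imageDTO: ImageDTO };

const payload: CaImageChangedPayload = { id: 'ca_1', imageDTO: { image_name: 'canvas.png' } };
```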
@@ -2,7 +2,7 @@ import { logger } from 'app/logging/logger';
 import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
 import { canvasMaskToControlAdapter } from 'features/canvas/store/actions';
 import { getCanvasData } from 'features/canvas/util/getCanvasData';
-import { controlAdapterImageChanged } from 'features/controlAdapters/store/controlAdaptersSlice';
+import { caImageChanged } from 'features/controlLayers/store/canvasV2Slice';
 import { toast } from 'features/toast/toast';
 import { t } from 'i18next';
 import { imagesApi } from 'services/api/endpoints/images';

@@ -57,14 +57,7 @@ export const addCanvasMaskToControlNetListener = (startAppListening: AppStartLis
         })
       ).unwrap();

-      const { image_name } = imageDTO;
-
-      dispatch(
-        controlAdapterImageChanged({
-          id,
-          controlImage: image_name,
-        })
-      );
+      dispatch(caImageChanged({ id, imageDTO }));
     },
   });
 };
@@ -1,12 +1,8 @@
 import { logger } from 'app/logging/logger';
 import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
-import {
-  controlAdapterIsEnabledChanged,
-  selectControlAdapterAll,
-} from 'features/controlAdapters/store/controlAdaptersSlice';
+import { caIsEnabledToggled, modelChanged, vaeSelected } from 'features/controlLayers/store/canvasV2Slice';
 import { loraRemoved } from 'features/lora/store/loraSlice';
 import { modelSelected } from 'features/parameters/store/actions';
-import { modelChanged, vaeSelected } from 'features/canvas/store/canvasSlice';
 import { zParameterModel } from 'features/parameters/types/parameterSchemas';
 import { toast } from 'features/toast/toast';
 import { t } from 'i18next';

@@ -51,10 +47,12 @@ export const addModelSelectedListener = (startAppListening: AppStartListening) =
       }

       // handle incompatible controlnets
-      selectControlAdapterAll(state.controlAdapters).forEach((ca) => {
+      state.canvasV2.controlAdapters.forEach((ca) => {
         if (ca.model?.base !== newBaseModel) {
-          dispatch(controlAdapterIsEnabledChanged({ id: ca.id, isEnabled: false }));
           modelsCleared += 1;
+          if (ca.isEnabled) {
+            dispatch(caIsEnabledToggled({ id: ca.id }));
+          }
         }
       });

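Note: because `caIsEnabledToggled` is a toggle rather than a setter like the removed `controlAdapterIsEnabledChanged({ isEnabled: false })`, it is only dispatched for adapters that are currently enabled; otherwise a disabled adapter would be switched back on. A standalone sketch of that guard, with types simplified:

```ts
// Simplified types for illustration only.
type ControlAdapter = { id: string; isEnabled: boolean; model?: { base: string } | null };

// Disable adapters whose model base no longer matches, using a toggle-style action.
const disableIncompatible = (adapters: ControlAdapter[], newBase: string, toggle: (id: string) => void): number => {
  let cleared = 0;
  adapters.forEach((ca) => {
    if (ca.model?.base !== newBase) {
      cleared += 1;
      if (ca.isEnabled) {
        // Toggling an enabled adapter turns it off; skipping disabled ones avoids re-enabling them.
        toggle(ca.id);
      }
    }
  });
  return cleared;
};
```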
@@ -5,8 +5,10 @@ import type { JSONObject } from 'common/types';
 import {
   caModelChanged,
   heightChanged,
+  ipaModelChanged,
   modelChanged,
   refinerModelChanged,
+  rgIPAdapterModelChanged,
   vaeSelected,
   widthChanged,
 } from 'features/controlLayers/store/canvasV2Slice';
@@ -20,6 +22,9 @@ import type { Logger } from 'roarr';
 import { modelConfigsAdapterSelectors, modelsApi } from 'services/api/endpoints/models';
 import type { AnyModelConfig } from 'services/api/types';
 import {
+  isControlNetOrT2IAdapterModelConfig,
+  isIPAdapterModelConfig,
+  isLoRAModelConfig,
   isNonRefinerMainModelConfig,
   isRefinerMainModelModelConfig,
   isSpandrelImageToImageModelConfig,
@@ -44,6 +49,7 @@ export const addModelsLoadedListener = (startAppListening: AppStartListening) =>
       handleLoRAModels(models, state, dispatch, log);
       handleControlAdapterModels(models, state, dispatch, log);
       handleSpandrelImageToImageModels(models, state, dispatch, log);
+      handleIPAdapterModels(models, state, dispatch, log);
     },
   });
 };
@@ -75,7 +81,7 @@ const handleMainModels: ModelHandler = (models, state, dispatch, log) => {
   if (defaultModelInList) {
     const result = zParameterModel.safeParse(defaultModelInList);
     if (result.success) {
-      dispatch(modelChanged({ model: defaultModelInList, previousModel: currentModel ?? undefined }));
+      dispatch(modelChanged({ model: defaultModelInList, previousModel: currentModel }));

       const optimalDimension = getOptimalDimension(defaultModelInList);
       if (getIsSizeOptimal(state.canvasV2.document.width, state.canvasV2.document.height, optimalDimension)) {
@@ -99,7 +105,7 @@ const handleMainModels: ModelHandler = (models, state, dispatch, log) => {
     return;
   }

-  dispatch(modelChanged({ model: result.data, previousModel: currentModel ?? undefined }));
+  dispatch(modelChanged({ model: result.data, previousModel: currentModel }));
 };

 const handleRefinerModels: ModelHandler = (models, state, dispatch, _log) => {
@@ -156,30 +162,49 @@ const handleVAEModels: ModelHandler = (models, state, dispatch, log) => {

 const handleLoRAModels: ModelHandler = (models, state, dispatch, _log) => {
   const loras = state.lora.loras;
+  const loraModels = models.filter(isLoRAModelConfig);

   forEach(loras, (lora, id) => {
-    const isLoRAAvailable = models.some((m) => m.key === lora.model.key);
-
+    const isLoRAAvailable = loraModels.some((m) => m.key === lora.model.key);
     if (isLoRAAvailable) {
       return;
     }
-
     dispatch(loraRemoved(id));
   });
 };

 const handleControlAdapterModels: ModelHandler = (models, state, dispatch, _log) => {
+  const caModels = models.filter(isControlNetOrT2IAdapterModelConfig);
   state.canvasV2.controlAdapters.forEach((ca) => {
-    const isModelAvailable = models.some((m) => m.key === ca.model?.key);
-
+    const isModelAvailable = caModels.some((m) => m.key === ca.model?.key);
     if (isModelAvailable) {
       return;
     }
-
     dispatch(caModelChanged({ id: ca.id, modelConfig: null }));
   });
 };

+const handleIPAdapterModels: ModelHandler = (models, state, dispatch, _log) => {
+  const ipaModels = models.filter(isIPAdapterModelConfig);
+  state.canvasV2.controlAdapters.forEach(({ id, model }) => {
+    const isModelAvailable = ipaModels.some((m) => m.key === model?.key);
+    if (isModelAvailable) {
+      return;
+    }
+    dispatch(ipaModelChanged({ id, modelConfig: null }));
+  });
+
+  state.canvasV2.regions.forEach(({ id, ipAdapters }) => {
+    ipAdapters.forEach(({ id: ipAdapterId, model }) => {
+      const isModelAvailable = ipaModels.some((m) => m.key === model?.key);
+      if (isModelAvailable) {
+        return;
+      }
+      dispatch(rgIPAdapterModelChanged({ id, ipAdapterId, modelConfig: null }));
+    });
+  });
+};
+
 const handleSpandrelImageToImageModels: ModelHandler = (models, state, dispatch, _log) => {
   const { upscaleModel: currentUpscaleModel, postProcessingModel: currentPostProcessingModel } = state.upscale;
   const upscaleModels = models.filter(isSpandrelImageToImageModelConfig);
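Note: each handler above now narrows the full model list with a type guard (`isLoRAModelConfig`, `isControlNetOrT2IAdapterModelConfig`, `isIPAdapterModelConfig`) before checking whether an installed model is still available, instead of matching keys against every config. A generic sketch of that filter-then-check pattern, using illustrative names rather than the app's real types:

```ts
// Illustrative types; the real configs come from services/api/types.
type ModelConfig = { key: string; type: string };

const isLoRAConfig = (m: ModelConfig): boolean => m.type === 'lora';

// Only configs of the matching kind are consulted when deciding availability.
const isStillAvailable = (models: ModelConfig[], key: string): boolean =>
  models.filter(isLoRAConfig).some((m) => m.key === key);
```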
@@ -1,14 +1,15 @@
 import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
-import { heightChanged, widthChanged } from 'features/controlLayers/store/canvasV2Slice';
-import { setDefaultSettings } from 'features/parameters/store/actions';
 import {
+  heightChanged,
   setCfgRescaleMultiplier,
   setCfgScale,
   setScheduler,
   setSteps,
   vaePrecisionChanged,
   vaeSelected,
-} from 'features/canvas/store/canvasSlice';
+  widthChanged,
+} from 'features/controlLayers/store/canvasV2Slice';
+import { setDefaultSettings } from 'features/parameters/store/actions';
 import {
   isParameterCFGRescaleMultiplier,
   isParameterCFGScale,
@@ -34,7 +34,7 @@ export const addInvocationCompleteEventListener = (startAppListening: AppStartLi
       // This complete event has an associated image output
       if (data.result.type === 'image_output' && !nodeTypeDenylist.includes(data.invocation.type)) {
         const { image_name } = data.result.image;
-        const { canvas, gallery } = getState();
+        const { canvasV2, gallery } = getState();

         // This populates the `getImageDTO` cache
         const imageDTORequest = dispatch(

@@ -47,7 +47,9 @@ export const addInvocationCompleteEventListener = (startAppListening: AppStartLi
         imageDTORequest.unsubscribe();

         // Add canvas images to the staging area
-        if (canvas.batchIds.includes(data.batch_id) && data.invocation_source_id === CANVAS_OUTPUT) {
+        // TODO(psyche): canvas batchid processing, [] -> canvas.batchIds
+        // if (canvas.batchIds.includes(data.batch_id) && data.invocation_source_id === CANVAS_OUTPUT) {
+        if ([].includes(data.batch_id) && data.invocation_source_id === CANVAS_OUTPUT) {
          dispatch(addImageToStagingArea(imageDTO));
         }

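Note: the staging-area gate above appears to be intentionally stubbed; an empty array literal never includes anything, so no image reaches the staging area until canvasV2 tracks batch IDs, as the TODO says. A minimal illustration of why the branch is inert:

```ts
// Placeholder standing in for the future canvasV2 batch-id list.
const batchIds: string[] = [];

// Always false until batch IDs are actually tracked again.
const shouldStage = batchIds.includes('some-batch-id');
```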
@@ -47,7 +47,10 @@ export const paramsReducers = {
   setShouldRandomizeSeed: (state, action: PayloadAction<boolean>) => {
     state.params.shouldRandomizeSeed = action.payload;
   },
-  modelChanged: (state, action: PayloadAction<{ model: ParameterModel | null; previousModel?: ParameterModel }>) => {
+  modelChanged: (
+    state,
+    action: PayloadAction<{ model: ParameterModel | null; previousModel?: ParameterModel | null }>
+  ) => {
     const { model, previousModel } = action.payload;
     state.params.model = model;

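Note: widening `previousModel` to accept `null` is what lets the callers earlier in this commit pass `currentModel` directly instead of coercing it with `?? undefined`. A sketch with simplified types:

```ts
// Simplified stand-in for the real ParameterModel.
type ParameterModel = { key: string; base: string };

// Old payload: previousModel could be present or absent, but not null.
type OldPayload = { model: ParameterModel | null; previousModel?: ParameterModel };
// New payload: null is accepted, matching a nullable "current model" in state.
type NewPayload = { model: ParameterModel | null; previousModel?: ParameterModel | null };

declare const currentModel: ParameterModel | null;
declare const model: ParameterModel;

// const before: OldPayload = { model, previousModel: currentModel };            // type error
const before: OldPayload = { model, previousModel: currentModel ?? undefined };  // old workaround
const after: NewPayload = { model, previousModel: currentModel };                // now fine
```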
@@ -1,11 +1,6 @@
 import { skipToken } from '@reduxjs/toolkit/query';
-import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
-import { setInitialCanvasImage } from 'features/canvas/store/canvasSlice';
-import { iiLayerAdded } from 'features/controlLayers/store/canvasV2Slice';
-import { selectOptimalDimension } from 'features/controlLayers/store/selectors';
 import { parseAndRecallAllMetadata } from 'features/metadata/util/handlers';
 import { toast } from 'features/toast/toast';
-import { setActiveTab } from 'features/ui/store/uiSlice';
 import { t } from 'i18next';
 import { useCallback, useEffect } from 'react';
 import { useGetImageDTOQuery, useGetImageMetadataQuery } from 'services/api/endpoints/images';
@@ -14,31 +9,30 @@ export const usePreselectedImage = (selectedImage?: {
   imageName: string;
   action: 'sendToImg2Img' | 'sendToCanvas' | 'useAllParameters';
 }) => {
-  const dispatch = useAppDispatch();
-  const optimalDimension = useAppSelector(selectOptimalDimension);
-
   const { currentData: selectedImageDto } = useGetImageDTOQuery(selectedImage?.imageName ?? skipToken);

   const { currentData: selectedImageMetadata } = useGetImageMetadataQuery(selectedImage?.imageName ?? skipToken);

   const handleSendToCanvas = useCallback(() => {
     if (selectedImageDto) {
-      dispatch(setInitialCanvasImage(selectedImageDto, optimalDimension));
-      dispatch(setActiveTab('canvas'));
+      // TODO(psyche): handle send to canvas
+      // dispatch(setInitialCanvasImage(selectedImageDto, optimalDimension));
+      // dispatch(setActiveTab('canvas'));
       toast({
         id: 'SENT_TO_CANVAS',
         title: t('toast.sentToUnifiedCanvas'),
         status: 'info',
       });
     }
-  }, [selectedImageDto, dispatch, optimalDimension]);
+  }, [selectedImageDto]);

   const handleSendToImg2Img = useCallback(() => {
     if (selectedImageDto) {
-      dispatch(iiLayerAdded(selectedImageDto));
-      dispatch(setActiveTab('generation'));
+      // TODO(psyche): handle send to img2img
+      // dispatch(iiLayerAdded(selectedImageDto));
+      // dispatch(setActiveTab('generation'));
     }
-  }, [dispatch, selectedImageDto]);
+  }, [selectedImageDto]);

   const handleUseAllMetadata = useCallback(() => {
     if (selectedImageMetadata) {
@@ -1,13 +1,9 @@
-import { createSelector } from '@reduxjs/toolkit';
 import { skipToken } from '@reduxjs/toolkit/query';
 import { useAppSelector } from 'app/store/storeHooks';
-import { selectGenerationSlice } from 'features/canvas/store/canvasSlice';
 import { useGetModelConfigQuery } from 'services/api/endpoints/models';

-const selectModelKey = createSelector(selectGenerationSlice, (generation) => generation.model?.key);
-
 export const useSelectedModelConfig = () => {
-  const key = useAppSelector(selectModelKey);
+  const key = useAppSelector((s) => s.canvasV2.params.model?.key);
   const { currentData: modelConfig } = useGetModelConfigQuery(key ?? skipToken);

   return modelConfig;
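Note: the hook now reads the selected model key straight from the canvasV2 params slice instead of a memoized selector over the old generation slice. A sketch of the inline selector, with the state shape reduced to the path it touches:

```ts
// State shape reduced to the path the selector reads.
type RootStateSketch = { canvasV2: { params: { model: { key: string } | null } } };

const selectModelKey = (s: RootStateSketch): string | undefined => s.canvasV2.params.model?.key;

// selectModelKey({ canvasV2: { params: { model: null } } }) === undefined
```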