From 2cde8a643e4b96455b4f656af12c82073c2866e3 Mon Sep 17 00:00:00 2001 From: psychedelicious <4822129+psychedelicious@users.noreply.github.com> Date: Thu, 2 May 2024 16:06:06 +1000 Subject: [PATCH] tidy(ui): suffix a control adapter types/objects with V2 Prevent mixing the old and new implementations up --- .../CALayer/CALayerControlAdapterWrapper.tsx | 4 +- .../ControlAndIPAdapter/ControlAdapter.tsx | 10 +-- .../ControlAdapterControlModeSelect.tsx | 10 +-- .../ControlAdapterImagePreview.tsx | 4 +- .../ControlAdapterProcessorTypeSelect.tsx | 4 +- .../ControlAndIPAdapter/IPAdapter.tsx | 8 +-- .../ControlAndIPAdapter/IPAdapterMethod.tsx | 12 ++-- .../IPAdapterModelSelect.tsx | 10 +-- .../processors/CannyProcessor.tsx | 2 +- .../processors/ColorMapProcessor.tsx | 2 +- .../IPALayer/IPALayerIPAdapterWrapper.tsx | 6 +- .../RGLayer/RGLayerIPAdapterWrapper.tsx | 6 +- .../controlLayers/hooks/addLayerHooks.ts | 4 +- .../controlLayers/store/controlLayersSlice.ts | 38 +++++----- .../src/features/controlLayers/store/types.ts | 12 ++-- .../util/controlAdapters.test.ts | 16 ++--- .../controlLayers/util/controlAdapters.ts | 72 +++++++++---------- .../util/graph/addControlLayersToGraph.ts | 28 ++++---- 18 files changed, 126 insertions(+), 122 deletions(-) diff --git a/invokeai/frontend/web/src/features/controlLayers/components/CALayer/CALayerControlAdapterWrapper.tsx b/invokeai/frontend/web/src/features/controlLayers/components/CALayer/CALayerControlAdapterWrapper.tsx index 6793a33f69..8ff1f9711f 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/CALayer/CALayerControlAdapterWrapper.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/CALayer/CALayerControlAdapterWrapper.tsx @@ -9,7 +9,7 @@ import { caOrIPALayerWeightChanged, selectCALayerOrThrow, } from 'features/controlLayers/store/controlLayersSlice'; -import type { ControlMode, ProcessorConfig } from 'features/controlLayers/util/controlAdapters'; +import type { ControlModeV2, ProcessorConfig } from 'features/controlLayers/util/controlAdapters'; import type { CALayerImageDropData } from 'features/dnd/types'; import { memo, useCallback, useMemo } from 'react'; import type { @@ -40,7 +40,7 @@ export const CALayerControlAdapterWrapper = memo(({ layerId }: Props) => { ); const onChangeControlMode = useCallback( - (controlMode: ControlMode) => { + (controlMode: ControlModeV2) => { dispatch( caLayerControlModeChanged({ layerId, diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapter.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapter.tsx index 087f634d73..c28c40ecc1 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapter.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapter.tsx @@ -1,10 +1,10 @@ import { Box, Divider, Flex, Icon, IconButton } from '@invoke-ai/ui-library'; import { ControlAdapterModelCombobox } from 'features/controlLayers/components/ControlAndIPAdapter/ControlAdapterModelCombobox'; import type { - ControlMode, - ControlNetConfig, + ControlModeV2, + ControlNetConfigV2, ProcessorConfig, - T2IAdapterConfig, + T2IAdapterConfigV2, } from 'features/controlLayers/util/controlAdapters'; import type { TypesafeDroppableData } from 'features/dnd/types'; import { memo } from 'react'; @@ -21,9 +21,9 @@ import { ControlAdapterProcessorTypeSelect } from './ControlAdapterProcessorType import { ControlAdapterWeight } 
from './ControlAdapterWeight'; type Props = { - controlAdapter: ControlNetConfig | T2IAdapterConfig; + controlAdapter: ControlNetConfigV2 | T2IAdapterConfigV2; onChangeBeginEndStepPct: (beginEndStepPct: [number, number]) => void; - onChangeControlMode: (controlMode: ControlMode) => void; + onChangeControlMode: (controlMode: ControlModeV2) => void; onChangeWeight: (weight: number) => void; onChangeProcessorConfig: (processorConfig: ProcessorConfig | null) => void; onChangeModel: (modelConfig: ControlNetModelConfig | T2IAdapterModelConfig) => void; diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterControlModeSelect.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterControlModeSelect.tsx index 34f4c85467..2c35ce51b6 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterControlModeSelect.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterControlModeSelect.tsx @@ -1,15 +1,15 @@ import type { ComboboxOnChange } from '@invoke-ai/ui-library'; import { Combobox, FormControl, FormLabel } from '@invoke-ai/ui-library'; import { InformationalPopover } from 'common/components/InformationalPopover/InformationalPopover'; -import type { ControlMode } from 'features/controlLayers/util/controlAdapters'; -import { isControlMode } from 'features/controlLayers/util/controlAdapters'; +import type { ControlModeV2 } from 'features/controlLayers/util/controlAdapters'; +import { isControlModeV2 } from 'features/controlLayers/util/controlAdapters'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import { assert } from 'tsafe'; type Props = { - controlMode: ControlMode; - onChange: (controlMode: ControlMode) => void; + controlMode: ControlModeV2; + onChange: (controlMode: ControlModeV2) => void; }; export const ControlAdapterControlModeSelect = memo(({ controlMode, onChange }: Props) => { @@ -26,7 +26,7 @@ export const ControlAdapterControlModeSelect = memo(({ controlMode, onChange }: const handleControlModeChange = useCallback( (v) => { - assert(isControlMode(v?.value)); + assert(isControlModeV2(v?.value)); onChange(v.value); }, [onChange] diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterImagePreview.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterImagePreview.tsx index 7def6b2b56..675118c534 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterImagePreview.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterImagePreview.tsx @@ -6,7 +6,7 @@ import IAIDndImage from 'common/components/IAIDndImage'; import IAIDndImageIcon from 'common/components/IAIDndImageIcon'; import { setBoundingBoxDimensions } from 'features/canvas/store/canvasSlice'; import { heightChanged, widthChanged } from 'features/controlLayers/store/controlLayersSlice'; -import type { ControlNetConfig, T2IAdapterConfig } from 'features/controlLayers/util/controlAdapters'; +import type { ControlNetConfigV2, T2IAdapterConfigV2 } from 'features/controlLayers/util/controlAdapters'; import type { ImageDraggableData, TypesafeDroppableData } from 'features/dnd/types'; import { calculateNewSize } from 'features/parameters/components/ImageSize/calculateNewSize'; import { selectOptimalDimension } 
from 'features/parameters/store/generationSlice'; @@ -23,7 +23,7 @@ import { import type { ImageDTO, PostUploadAction } from 'services/api/types'; type Props = { - controlAdapter: ControlNetConfig | T2IAdapterConfig; + controlAdapter: ControlNetConfigV2 | T2IAdapterConfigV2; onChangeImage: (imageDTO: ImageDTO | null) => void; droppableData: TypesafeDroppableData; postUploadAction: PostUploadAction; diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterProcessorTypeSelect.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterProcessorTypeSelect.tsx index 1d14d8606f..5598b81787 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterProcessorTypeSelect.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/ControlAdapterProcessorTypeSelect.tsx @@ -4,7 +4,7 @@ import { createMemoizedSelector } from 'app/store/createMemoizedSelector'; import { useAppSelector } from 'app/store/storeHooks'; import { InformationalPopover } from 'common/components/InformationalPopover/InformationalPopover'; import type { ProcessorConfig } from 'features/controlLayers/util/controlAdapters'; -import { CA_PROCESSOR_DATA, isProcessorType } from 'features/controlLayers/util/controlAdapters'; +import { CA_PROCESSOR_DATA, isProcessorTypeV2 } from 'features/controlLayers/util/controlAdapters'; import { configSelector } from 'features/system/store/configSelectors'; import { includes, map } from 'lodash-es'; import { memo, useCallback, useMemo } from 'react'; @@ -36,7 +36,7 @@ export const ControlAdapterProcessorTypeSelect = memo(({ config, onChange }: Pro if (!v) { onChange(null); } else { - assert(isProcessorType(v.value)); + assert(isProcessorTypeV2(v.value)); onChange(CA_PROCESSOR_DATA[v.value].buildDefaults()); } }, diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapter.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapter.tsx index a0aa7d79a1..86ed77ce36 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapter.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapter.tsx @@ -4,18 +4,18 @@ import { ControlAdapterWeight } from 'features/controlLayers/components/ControlA import { IPAdapterImagePreview } from 'features/controlLayers/components/ControlAndIPAdapter/IPAdapterImagePreview'; import { IPAdapterMethod } from 'features/controlLayers/components/ControlAndIPAdapter/IPAdapterMethod'; import { IPAdapterModelSelect } from 'features/controlLayers/components/ControlAndIPAdapter/IPAdapterModelSelect'; -import type { CLIPVisionModel, IPAdapterConfig, IPMethod } from 'features/controlLayers/util/controlAdapters'; +import type { CLIPVisionModelV2, IPAdapterConfigV2, IPMethodV2 } from 'features/controlLayers/util/controlAdapters'; import type { TypesafeDroppableData } from 'features/dnd/types'; import { memo } from 'react'; import type { ImageDTO, IPAdapterModelConfig, PostUploadAction } from 'services/api/types'; type Props = { - ipAdapter: IPAdapterConfig; + ipAdapter: IPAdapterConfigV2; onChangeBeginEndStepPct: (beginEndStepPct: [number, number]) => void; onChangeWeight: (weight: number) => void; - onChangeIPMethod: (method: IPMethod) => void; + onChangeIPMethod: (method: IPMethodV2) => void; onChangeModel: (modelConfig: IPAdapterModelConfig) => void; - 
onChangeCLIPVisionModel: (clipVisionModel: CLIPVisionModel) => void; + onChangeCLIPVisionModel: (clipVisionModel: CLIPVisionModelV2) => void; onChangeImage: (imageDTO: ImageDTO | null) => void; droppableData: TypesafeDroppableData; postUploadAction: PostUploadAction; diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterMethod.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterMethod.tsx index 70fd63f9c0..4f6a468fc3 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterMethod.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterMethod.tsx @@ -1,20 +1,20 @@ import type { ComboboxOnChange } from '@invoke-ai/ui-library'; import { Combobox, FormControl, FormLabel } from '@invoke-ai/ui-library'; import { InformationalPopover } from 'common/components/InformationalPopover/InformationalPopover'; -import type { IPMethod } from 'features/controlLayers/util/controlAdapters'; -import { isIPMethod } from 'features/controlLayers/util/controlAdapters'; +import type { IPMethodV2 } from 'features/controlLayers/util/controlAdapters'; +import { isIPMethodV2 } from 'features/controlLayers/util/controlAdapters'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import { assert } from 'tsafe'; type Props = { - method: IPMethod; - onChange: (method: IPMethod) => void; + method: IPMethodV2; + onChange: (method: IPMethodV2) => void; }; export const IPAdapterMethod = memo(({ method, onChange }: Props) => { const { t } = useTranslation(); - const options: { label: string; value: IPMethod }[] = useMemo( + const options: { label: string; value: IPMethodV2 }[] = useMemo( () => [ { label: t('controlnet.full'), value: 'full' }, { label: `${t('controlnet.style')} (${t('common.beta')})`, value: 'style' }, @@ -24,7 +24,7 @@ export const IPAdapterMethod = memo(({ method, onChange }: Props) => { ); const _onChange = useCallback( (v) => { - assert(isIPMethod(v?.value)); + assert(isIPMethodV2(v?.value)); onChange(v.value); }, [onChange] diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterModelSelect.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterModelSelect.tsx index e47bcd5182..b0541dca2c 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterModelSelect.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/IPAdapterModelSelect.tsx @@ -2,8 +2,8 @@ import type { ComboboxOnChange } from '@invoke-ai/ui-library'; import { Combobox, Flex, FormControl, Tooltip } from '@invoke-ai/ui-library'; import { useAppSelector } from 'app/store/storeHooks'; import { useGroupedModelCombobox } from 'common/hooks/useGroupedModelCombobox'; -import type { CLIPVisionModel } from 'features/controlLayers/util/controlAdapters'; -import { isCLIPVisionModel } from 'features/controlLayers/util/controlAdapters'; +import type { CLIPVisionModelV2 } from 'features/controlLayers/util/controlAdapters'; +import { isCLIPVisionModelV2 } from 'features/controlLayers/util/controlAdapters'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import { useIPAdapterModels } from 'services/api/hooks/modelsByType'; @@ -18,8 +18,8 @@ const CLIP_VISION_OPTIONS = [ type Props = { modelKey: string | null; onChangeModel: 
(modelConfig: IPAdapterModelConfig) => void; - clipVisionModel: CLIPVisionModel; - onChangeCLIPVisionModel: (clipVisionModel: CLIPVisionModel) => void; + clipVisionModel: CLIPVisionModelV2; + onChangeCLIPVisionModel: (clipVisionModel: CLIPVisionModelV2) => void; }; export const IPAdapterModelSelect = memo( @@ -41,7 +41,7 @@ export const IPAdapterModelSelect = memo( const _onChangeCLIPVisionModel = useCallback( (v) => { - assert(isCLIPVisionModel(v?.value)); + assert(isCLIPVisionModelV2(v?.value)); onChangeCLIPVisionModel(v.value); }, [onChangeCLIPVisionModel] diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/CannyProcessor.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/CannyProcessor.tsx index cc3e9ba996..ef6e4160d6 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/CannyProcessor.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/CannyProcessor.tsx @@ -1,6 +1,6 @@ import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library'; import type { ProcessorComponentProps } from 'features/controlLayers/components/ControlAndIPAdapter/processors/types'; -import { type CannyProcessorConfig, CA_PROCESSOR_DATA } from 'features/controlLayers/util/controlAdapters'; +import { CA_PROCESSOR_DATA, type CannyProcessorConfig } from 'features/controlLayers/util/controlAdapters'; import { useCallback } from 'react'; import { useTranslation } from 'react-i18next'; diff --git a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/ColorMapProcessor.tsx b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/ColorMapProcessor.tsx index eda9af47a5..6faa00dd14 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/ColorMapProcessor.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/ControlAndIPAdapter/processors/ColorMapProcessor.tsx @@ -1,6 +1,6 @@ import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library'; import type { ProcessorComponentProps } from 'features/controlLayers/components/ControlAndIPAdapter/processors/types'; -import { type ColorMapProcessorConfig, CA_PROCESSOR_DATA } from 'features/controlLayers/util/controlAdapters'; +import { CA_PROCESSOR_DATA, type ColorMapProcessorConfig } from 'features/controlLayers/util/controlAdapters'; import { memo, useCallback } from 'react'; import { useTranslation } from 'react-i18next'; diff --git a/invokeai/frontend/web/src/features/controlLayers/components/IPALayer/IPALayerIPAdapterWrapper.tsx b/invokeai/frontend/web/src/features/controlLayers/components/IPALayer/IPALayerIPAdapterWrapper.tsx index b8dfae6c03..9f99710dac 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/IPALayer/IPALayerIPAdapterWrapper.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/IPALayer/IPALayerIPAdapterWrapper.tsx @@ -9,7 +9,7 @@ import { ipaLayerModelChanged, selectIPALayerOrThrow, } from 'features/controlLayers/store/controlLayersSlice'; -import type { CLIPVisionModel, IPMethod } from 'features/controlLayers/util/controlAdapters'; +import type { CLIPVisionModelV2, IPMethodV2 } from 'features/controlLayers/util/controlAdapters'; import type { IPALayerImageDropData } from 'features/dnd/types'; import { memo, useCallback, 
useMemo } from 'react'; import type { ImageDTO, IPAdapterModelConfig, IPALayerImagePostUploadAction } from 'services/api/types'; @@ -42,7 +42,7 @@ export const IPALayerIPAdapterWrapper = memo(({ layerId }: Props) => { ); const onChangeIPMethod = useCallback( - (method: IPMethod) => { + (method: IPMethodV2) => { dispatch(ipaLayerMethodChanged({ layerId, method })); }, [dispatch, layerId] @@ -56,7 +56,7 @@ export const IPALayerIPAdapterWrapper = memo(({ layerId }: Props) => { ); const onChangeCLIPVisionModel = useCallback( - (clipVisionModel: CLIPVisionModel) => { + (clipVisionModel: CLIPVisionModelV2) => { dispatch(ipaLayerCLIPVisionModelChanged({ layerId, clipVisionModel })); }, [dispatch, layerId] diff --git a/invokeai/frontend/web/src/features/controlLayers/components/RGLayer/RGLayerIPAdapterWrapper.tsx b/invokeai/frontend/web/src/features/controlLayers/components/RGLayer/RGLayerIPAdapterWrapper.tsx index 015cf75e4d..f7be62eb0a 100644 --- a/invokeai/frontend/web/src/features/controlLayers/components/RGLayer/RGLayerIPAdapterWrapper.tsx +++ b/invokeai/frontend/web/src/features/controlLayers/components/RGLayer/RGLayerIPAdapterWrapper.tsx @@ -11,7 +11,7 @@ import { rgLayerIPAdapterWeightChanged, selectRGLayerIPAdapterOrThrow, } from 'features/controlLayers/store/controlLayersSlice'; -import type { CLIPVisionModel, IPMethod } from 'features/controlLayers/util/controlAdapters'; +import type { CLIPVisionModelV2, IPMethodV2 } from 'features/controlLayers/util/controlAdapters'; import type { RGLayerIPAdapterImageDropData } from 'features/dnd/types'; import { memo, useCallback, useMemo } from 'react'; import { PiTrashSimpleBold } from 'react-icons/pi'; @@ -51,7 +51,7 @@ export const RGLayerIPAdapterWrapper = memo(({ layerId, ipAdapterId, ipAdapterNu ); const onChangeIPMethod = useCallback( - (method: IPMethod) => { + (method: IPMethodV2) => { dispatch(rgLayerIPAdapterMethodChanged({ layerId, ipAdapterId, method })); }, [dispatch, ipAdapterId, layerId] @@ -65,7 +65,7 @@ export const RGLayerIPAdapterWrapper = memo(({ layerId, ipAdapterId, ipAdapterNu ); const onChangeCLIPVisionModel = useCallback( - (clipVisionModel: CLIPVisionModel) => { + (clipVisionModel: CLIPVisionModelV2) => { dispatch(rgLayerIPAdapterCLIPVisionModelChanged({ layerId, ipAdapterId, clipVisionModel })); }, [dispatch, ipAdapterId, layerId] diff --git a/invokeai/frontend/web/src/features/controlLayers/hooks/addLayerHooks.ts b/invokeai/frontend/web/src/features/controlLayers/hooks/addLayerHooks.ts index 75ffd81202..7a4e7ebc09 100644 --- a/invokeai/frontend/web/src/features/controlLayers/hooks/addLayerHooks.ts +++ b/invokeai/frontend/web/src/features/controlLayers/hooks/addLayerHooks.ts @@ -5,7 +5,7 @@ import { buildIPAdapter, buildT2IAdapter, CA_PROCESSOR_DATA, - isProcessorType, + isProcessorTypeV2, } from 'features/controlLayers/util/controlAdapters'; import { zModelIdentifierField } from 'features/nodes/types/common'; import { useCallback, useMemo } from 'react'; @@ -30,7 +30,7 @@ export const useAddCALayer = () => { const id = uuidv4(); const defaultPreprocessor = model.default_settings?.preprocessor; - const processorConfig = isProcessorType(defaultPreprocessor) + const processorConfig = isProcessorTypeV2(defaultPreprocessor) ? 
CA_PROCESSOR_DATA[defaultPreprocessor].buildDefaults(baseModel) : null; diff --git a/invokeai/frontend/web/src/features/controlLayers/store/controlLayersSlice.ts b/invokeai/frontend/web/src/features/controlLayers/store/controlLayersSlice.ts index c27a98c826..5f70cbf4a9 100644 --- a/invokeai/frontend/web/src/features/controlLayers/store/controlLayersSlice.ts +++ b/invokeai/frontend/web/src/features/controlLayers/store/controlLayersSlice.ts @@ -4,16 +4,16 @@ import type { PersistConfig, RootState } from 'app/store/store'; import { moveBackward, moveForward, moveToBack, moveToFront } from 'common/util/arrayUtils'; import { deepClone } from 'common/util/deepClone'; import type { - CLIPVisionModel, - ControlMode, - ControlNetConfig, - IPAdapterConfig, - IPMethod, + CLIPVisionModelV2, + ControlModeV2, + ControlNetConfigV2, + IPAdapterConfigV2, + IPMethodV2, ProcessorConfig, - T2IAdapterConfig, + T2IAdapterConfigV2, } from 'features/controlLayers/util/controlAdapters'; import { - buildControlAdapterProcessor, + buildControlAdapterProcessorV2, controlNetToT2IAdapter, imageDTOToImageWithDims, t2iAdapterToControlNet, @@ -110,7 +110,7 @@ export const selectRGLayerIPAdapterOrThrow = ( state: ControlLayersState, layerId: string, ipAdapterId: string -): IPAdapterConfig => { +): IPAdapterConfigV2 => { const layer = state.layers.find((l) => l.id === layerId); assert(isRegionalGuidanceLayer(layer)); const ipAdapter = layer.ipAdapters.find((ipAdapter) => ipAdapter.id === ipAdapterId); @@ -221,7 +221,7 @@ export const controlLayersSlice = createSlice({ caLayerAdded: { reducer: ( state, - action: PayloadAction<{ layerId: string; controlAdapter: ControlNetConfig | T2IAdapterConfig }> + action: PayloadAction<{ layerId: string; controlAdapter: ControlNetConfigV2 | T2IAdapterConfigV2 }> ) => { const { layerId, controlAdapter } = action.payload; const layer: ControlAdapterLayer = { @@ -245,7 +245,7 @@ export const controlLayersSlice = createSlice({ } } }, - prepare: (controlAdapter: ControlNetConfig | T2IAdapterConfig) => ({ + prepare: (controlAdapter: ControlNetConfigV2 | T2IAdapterConfigV2) => ({ payload: { layerId: uuidv4(), controlAdapter }, }), }, @@ -297,7 +297,7 @@ export const controlLayersSlice = createSlice({ layer.controlAdapter = controlNetToT2IAdapter(layer.controlAdapter); } - const candidateProcessorConfig = buildControlAdapterProcessor(modelConfig); + const candidateProcessorConfig = buildControlAdapterProcessorV2(modelConfig); if (candidateProcessorConfig?.type !== layer.controlAdapter.processorConfig?.type) { // The processor has changed. For example, the previous model was a Canny model and the new model is a Depth // model. We need to use the new processor. 
@@ -305,7 +305,7 @@ export const controlLayersSlice = createSlice({ layer.controlAdapter.processorConfig = candidateProcessorConfig; } }, - caLayerControlModeChanged: (state, action: PayloadAction<{ layerId: string; controlMode: ControlMode }>) => { + caLayerControlModeChanged: (state, action: PayloadAction<{ layerId: string; controlMode: ControlModeV2 }>) => { const { layerId, controlMode } = action.payload; const layer = selectCALayerOrThrow(state, layerId); assert(layer.controlAdapter.type === 'controlnet'); @@ -344,7 +344,7 @@ export const controlLayersSlice = createSlice({ //#region IP Adapter Layers ipaLayerAdded: { - reducer: (state, action: PayloadAction<{ layerId: string; ipAdapter: IPAdapterConfig }>) => { + reducer: (state, action: PayloadAction<{ layerId: string; ipAdapter: IPAdapterConfigV2 }>) => { const { layerId, ipAdapter } = action.payload; const layer: IPAdapterLayer = { id: getIPALayerId(layerId), @@ -354,14 +354,14 @@ export const controlLayersSlice = createSlice({ }; state.layers.push(layer); }, - prepare: (ipAdapter: IPAdapterConfig) => ({ payload: { layerId: uuidv4(), ipAdapter } }), + prepare: (ipAdapter: IPAdapterConfigV2) => ({ payload: { layerId: uuidv4(), ipAdapter } }), }, ipaLayerImageChanged: (state, action: PayloadAction<{ layerId: string; imageDTO: ImageDTO | null }>) => { const { layerId, imageDTO } = action.payload; const layer = selectIPALayerOrThrow(state, layerId); layer.ipAdapter.image = imageDTO ? imageDTOToImageWithDims(imageDTO) : null; }, - ipaLayerMethodChanged: (state, action: PayloadAction<{ layerId: string; method: IPMethod }>) => { + ipaLayerMethodChanged: (state, action: PayloadAction<{ layerId: string; method: IPMethodV2 }>) => { const { layerId, method } = action.payload; const layer = selectIPALayerOrThrow(state, layerId); layer.ipAdapter.method = method; @@ -383,7 +383,7 @@ export const controlLayersSlice = createSlice({ }, ipaLayerCLIPVisionModelChanged: ( state, - action: PayloadAction<{ layerId: string; clipVisionModel: CLIPVisionModel }> + action: PayloadAction<{ layerId: string; clipVisionModel: CLIPVisionModelV2 }> ) => { const { layerId, clipVisionModel } = action.payload; const layer = selectIPALayerOrThrow(state, layerId); @@ -533,7 +533,7 @@ export const controlLayersSlice = createSlice({ const layer = selectRGLayerOrThrow(state, layerId); layer.autoNegative = autoNegative; }, - rgLayerIPAdapterAdded: (state, action: PayloadAction<{ layerId: string; ipAdapter: IPAdapterConfig }>) => { + rgLayerIPAdapterAdded: (state, action: PayloadAction<{ layerId: string; ipAdapter: IPAdapterConfigV2 }>) => { const { layerId, ipAdapter } = action.payload; const layer = selectRGLayerOrThrow(state, layerId); layer.ipAdapters.push(ipAdapter); @@ -569,7 +569,7 @@ export const controlLayersSlice = createSlice({ }, rgLayerIPAdapterMethodChanged: ( state, - action: PayloadAction<{ layerId: string; ipAdapterId: string; method: IPMethod }> + action: PayloadAction<{ layerId: string; ipAdapterId: string; method: IPMethodV2 }> ) => { const { layerId, ipAdapterId, method } = action.payload; const ipAdapter = selectRGLayerIPAdapterOrThrow(state, layerId, ipAdapterId); @@ -593,7 +593,7 @@ export const controlLayersSlice = createSlice({ }, rgLayerIPAdapterCLIPVisionModelChanged: ( state, - action: PayloadAction<{ layerId: string; ipAdapterId: string; clipVisionModel: CLIPVisionModel }> + action: PayloadAction<{ layerId: string; ipAdapterId: string; clipVisionModel: CLIPVisionModelV2 }> ) => { const { layerId, ipAdapterId, clipVisionModel } = action.payload; 
const ipAdapter = selectRGLayerIPAdapterOrThrow(state, layerId, ipAdapterId); diff --git a/invokeai/frontend/web/src/features/controlLayers/store/types.ts b/invokeai/frontend/web/src/features/controlLayers/store/types.ts index a4d88f3a0a..cbf47ff3ad 100644 --- a/invokeai/frontend/web/src/features/controlLayers/store/types.ts +++ b/invokeai/frontend/web/src/features/controlLayers/store/types.ts @@ -1,4 +1,8 @@ -import type { ControlNetConfig, IPAdapterConfig, T2IAdapterConfig } from 'features/controlLayers/util/controlAdapters'; +import type { + ControlNetConfigV2, + IPAdapterConfigV2, + T2IAdapterConfigV2, +} from 'features/controlLayers/util/controlAdapters'; import type { AspectRatioState } from 'features/parameters/components/ImageSize/types'; import type { ParameterAutoNegative, @@ -50,12 +54,12 @@ export type ControlAdapterLayer = RenderableLayerBase & { type: 'control_adapter_layer'; // technically, also t2i adapter layer opacity: number; isFilterEnabled: boolean; - controlAdapter: ControlNetConfig | T2IAdapterConfig; + controlAdapter: ControlNetConfigV2 | T2IAdapterConfigV2; }; export type IPAdapterLayer = LayerBase & { type: 'ip_adapter_layer'; - ipAdapter: IPAdapterConfig; + ipAdapter: IPAdapterConfigV2; }; export type RegionalGuidanceLayer = RenderableLayerBase & { @@ -63,7 +67,7 @@ export type RegionalGuidanceLayer = RenderableLayerBase & { maskObjects: (VectorMaskLine | VectorMaskRect)[]; positivePrompt: ParameterPositivePrompt | null; negativePrompt: ParameterNegativePrompt | null; // Up to one text prompt per mask - ipAdapters: IPAdapterConfig[]; // Any number of image prompts + ipAdapters: IPAdapterConfigV2[]; // Any number of image prompts previewColor: RgbColor; autoNegative: ParameterAutoNegative; needsPixelBbox: boolean; // Needs the slower pixel-based bbox calculation - set to true when an there is an eraser object diff --git a/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.test.ts b/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.test.ts index 656b759faa..880514bf7c 100644 --- a/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.test.ts +++ b/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.test.ts @@ -4,20 +4,20 @@ import { assert } from 'tsafe'; import { describe, test } from 'vitest'; import type { - CLIPVisionModel, - ControlMode, + CLIPVisionModelV2, + ControlModeV2, DepthAnythingModelSize, - IPMethod, + IPMethodV2, ProcessorConfig, - ProcessorType, + ProcessorTypeV2, } from './controlAdapters'; describe('Control Adapter Types', () => { - test('ProcessorType', () => assert>()); - test('IP Adapter Method', () => assert, IPMethod>>()); + test('ProcessorType', () => assert>()); + test('IP Adapter Method', () => assert, IPMethodV2>>()); test('CLIP Vision Model', () => - assert, CLIPVisionModel>>()); - test('Control Mode', () => assert, ControlMode>>()); + assert, CLIPVisionModelV2>>()); + test('Control Mode', () => assert, ControlModeV2>>()); test('DepthAnything Model Size', () => assert, DepthAnythingModelSize>>()); }); diff --git a/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.ts b/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.ts index afc168d749..360cfcabc6 100644 --- a/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.ts +++ b/invokeai/frontend/web/src/features/controlLayers/util/controlAdapters.ts @@ -94,45 +94,45 @@ type ControlAdapterBase = { beginEndStepPct: [number, number]; }; -const zControlMode = 
z.enum(['balanced', 'more_prompt', 'more_control', 'unbalanced']);
-export type ControlMode = z.infer<typeof zControlMode>;
-export const isControlMode = (v: unknown): v is ControlMode => zControlMode.safeParse(v).success;
+const zControlModeV2 = z.enum(['balanced', 'more_prompt', 'more_control', 'unbalanced']);
+export type ControlModeV2 = z.infer<typeof zControlModeV2>;
+export const isControlModeV2 = (v: unknown): v is ControlModeV2 => zControlModeV2.safeParse(v).success;

-export type ControlNetConfig = ControlAdapterBase & {
+export type ControlNetConfigV2 = ControlAdapterBase & {
   type: 'controlnet';
   model: ParameterControlNetModel | null;
-  controlMode: ControlMode;
+  controlMode: ControlModeV2;
 };
-export const isControlNetConfig = (ca: ControlNetConfig | T2IAdapterConfig): ca is ControlNetConfig =>
+export const isControlNetConfigV2 = (ca: ControlNetConfigV2 | T2IAdapterConfigV2): ca is ControlNetConfigV2 =>
   ca.type === 'controlnet';

-export type T2IAdapterConfig = ControlAdapterBase & {
+export type T2IAdapterConfigV2 = ControlAdapterBase & {
   type: 't2i_adapter';
   model: ParameterT2IAdapterModel | null;
 };
-export const isT2IAdapterConfig = (ca: ControlNetConfig | T2IAdapterConfig): ca is T2IAdapterConfig =>
+export const isT2IAdapterConfigV2 = (ca: ControlNetConfigV2 | T2IAdapterConfigV2): ca is T2IAdapterConfigV2 =>
   ca.type === 't2i_adapter';

-const zCLIPVisionModel = z.enum(['ViT-H', 'ViT-G']);
-export type CLIPVisionModel = z.infer<typeof zCLIPVisionModel>;
-export const isCLIPVisionModel = (v: unknown): v is CLIPVisionModel => zCLIPVisionModel.safeParse(v).success;
+const zCLIPVisionModelV2 = z.enum(['ViT-H', 'ViT-G']);
+export type CLIPVisionModelV2 = z.infer<typeof zCLIPVisionModelV2>;
+export const isCLIPVisionModelV2 = (v: unknown): v is CLIPVisionModelV2 => zCLIPVisionModelV2.safeParse(v).success;

-const zIPMethod = z.enum(['full', 'style', 'composition']);
-export type IPMethod = z.infer<typeof zIPMethod>;
-export const isIPMethod = (v: unknown): v is IPMethod => zIPMethod.safeParse(v).success;
+const zIPMethodV2 = z.enum(['full', 'style', 'composition']);
+export type IPMethodV2 = z.infer<typeof zIPMethodV2>;
+export const isIPMethodV2 = (v: unknown): v is IPMethodV2 => zIPMethodV2.safeParse(v).success;

-export type IPAdapterConfig = {
+export type IPAdapterConfigV2 = {
   id: string;
   type: 'ip_adapter';
   weight: number;
-  method: IPMethod;
+  method: IPMethodV2;
   image: ImageWithDims | null;
   model: ParameterIPAdapterModel | null;
-  clipVisionModel: CLIPVisionModel;
+  clipVisionModel: CLIPVisionModelV2;
   beginEndStepPct: [number, number];
 };

-const zProcessorType = z.enum([
+const zProcessorTypeV2 = z.enum([
   'canny_image_processor',
   'color_map_image_processor',
   'content_shuffle_image_processor',
@@ -148,10 +148,10 @@ const zProcessorType = z.enum([
   'pidi_image_processor',
   'zoe_depth_image_processor',
 ]);
-export type ProcessorType = z.infer<typeof zProcessorType>;
-export const isProcessorType = (v: unknown): v is ProcessorType => zProcessorType.safeParse(v).success;
+export type ProcessorTypeV2 = z.infer<typeof zProcessorTypeV2>;
+export const isProcessorTypeV2 = (v: unknown): v is ProcessorTypeV2 => zProcessorTypeV2.safeParse(v).success;

-type ProcessorData<T extends ProcessorType> = {
+type ProcessorData<T extends ProcessorTypeV2> = {
   type: T;
   labelTKey: string;
   descriptionTKey: string;
@@ -165,7 +165,7 @@ type ProcessorData<T extends ProcessorType> = {
 const minDim = (image: ImageWithDims): number => Math.min(image.width, image.height);

 type CAProcessorsData = {
-  [key in ProcessorType]: ProcessorData<key>;
+  [key in ProcessorTypeV2]: ProcessorData<key>;
 };
 /**
  * A dict of ControlNet processors, including:
@@ -405,7 +405,7 @@ export const CA_PROCESSOR_DATA: CAProcessorsData = {
   },
 };

-const initialControlNet: Omit<ControlNetConfig, 'id'> = {
+const initialControlNetV2: Omit<ControlNetConfigV2, 'id'> = {
   type: 'controlnet',
   model: null,
   weight: 1,
@@ -417,7 +417,7 @@ const initialControlNet: Omit<ControlNetConfig, 'id'> = {
   processorConfig: CA_PROCESSOR_DATA.canny_image_processor.buildDefaults(),
 };

-const initialT2IAdapter: Omit<T2IAdapterConfig, 'id'> = {
+const initialT2IAdapterV2: Omit<T2IAdapterConfigV2, 'id'> = {
   type: 't2i_adapter',
   model: null,
   weight: 1,
@@ -428,7 +428,7 @@ const initialT2IAdapter: Omit<T2IAdapterConfig, 'id'> = {
   processorConfig: CA_PROCESSOR_DATA.canny_image_processor.buildDefaults(),
 };

-const initialIPAdapter: Omit<IPAdapterConfig, 'id'> = {
+const initialIPAdapterV2: Omit<IPAdapterConfigV2, 'id'> = {
   type: 'ip_adapter',
   image: null,
   model: null,
@@ -438,23 +438,23 @@ const initialIPAdapter: Omit<IPAdapterConfig, 'id'> = {
   weight: 1,
 };

-export const buildControlNet = (id: string, overrides?: Partial<ControlNetConfig>): ControlNetConfig => {
-  return merge(deepClone(initialControlNet), { id, ...overrides });
+export const buildControlNet = (id: string, overrides?: Partial<ControlNetConfigV2>): ControlNetConfigV2 => {
+  return merge(deepClone(initialControlNetV2), { id, ...overrides });
 };

-export const buildT2IAdapter = (id: string, overrides?: Partial<T2IAdapterConfig>): T2IAdapterConfig => {
-  return merge(deepClone(initialT2IAdapter), { id, ...overrides });
+export const buildT2IAdapter = (id: string, overrides?: Partial<T2IAdapterConfigV2>): T2IAdapterConfigV2 => {
+  return merge(deepClone(initialT2IAdapterV2), { id, ...overrides });
 };

-export const buildIPAdapter = (id: string, overrides?: Partial<IPAdapterConfig>): IPAdapterConfig => {
-  return merge(deepClone(initialIPAdapter), { id, ...overrides });
+export const buildIPAdapter = (id: string, overrides?: Partial<IPAdapterConfigV2>): IPAdapterConfigV2 => {
+  return merge(deepClone(initialIPAdapterV2), { id, ...overrides });
 };

-export const buildControlAdapterProcessor = (
+export const buildControlAdapterProcessorV2 = (
   modelConfig: ControlNetModelConfig | T2IAdapterModelConfig
 ): ProcessorConfig | null => {
   const defaultPreprocessor = modelConfig.default_settings?.preprocessor;
-  if (!isProcessorType(defaultPreprocessor)) {
+  if (!isProcessorTypeV2(defaultPreprocessor)) {
     return null;
   }
   const processorConfig = CA_PROCESSOR_DATA[defaultPreprocessor].buildDefaults(modelConfig.base);
@@ -467,15 +467,15 @@ export const imageDTOToImageWithDims = ({ image_name, width, height }: ImageDTO)
   height,
 });

-export const t2iAdapterToControlNet = (t2iAdapter: T2IAdapterConfig): ControlNetConfig => {
+export const t2iAdapterToControlNet = (t2iAdapter: T2IAdapterConfigV2): ControlNetConfigV2 => {
   return {
     ...deepClone(t2iAdapter),
     type: 'controlnet',
-    controlMode: initialControlNet.controlMode,
+    controlMode: initialControlNetV2.controlMode,
   };
 };

-export const controlNetToT2IAdapter = (controlNet: ControlNetConfig): T2IAdapterConfig => {
+export const controlNetToT2IAdapter = (controlNet: ControlNetConfigV2): T2IAdapterConfigV2 => {
   return {
     ...omit(deepClone(controlNet), 'controlMode'),
     type: 't2i_adapter',
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/addControlLayersToGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/addControlLayersToGraph.ts
index 4581b51ee1..da13fed9f5 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/addControlLayersToGraph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/addControlLayersToGraph.ts
@@ -6,13 +6,13 @@ import {
   isRegionalGuidanceLayer,
 } from 'features/controlLayers/store/controlLayersSlice';
 import {
-  type ControlNetConfig,
+  type ControlNetConfigV2,
   type ImageWithDims,
-  type IPAdapterConfig,
-  isControlNetConfig,
-  isT2IAdapterConfig,
+  type IPAdapterConfigV2,
+  isControlNetConfigV2,
+  isT2IAdapterConfigV2,
   type ProcessorConfig,
-  type T2IAdapterConfig,
+  type 
T2IAdapterConfigV2, } from 'features/controlLayers/util/controlAdapters'; import { getRegionalPromptLayerBlobs } from 'features/controlLayers/util/getLayerBlobs'; import type { ImageField } from 'features/nodes/types/common'; @@ -64,7 +64,7 @@ const buildControlImage = ( assert(false, 'Attempted to add unprocessed control image'); }; -const buildControlNetMetadata = (controlNet: ControlNetConfig): S['ControlNetMetadataField'] => { +const buildControlNetMetadata = (controlNet: ControlNetConfigV2): S['ControlNetMetadataField'] => { const { beginEndStepPct, controlMode, image, model, processedImage, processorConfig, weight } = controlNet; assert(model, 'ControlNet model is required'); @@ -113,7 +113,7 @@ const addControlNetCollectorSafe = (graph: NonNullableGraph, denoiseNodeId: stri }; const addGlobalControlNetsToGraph = async ( - controlNets: ControlNetConfig[], + controlNets: ControlNetConfigV2[], graph: NonNullableGraph, denoiseNodeId: string ) => { @@ -157,7 +157,7 @@ const addGlobalControlNetsToGraph = async ( upsertMetadata(graph, { controlnets: controlNetMetadata }); }; -const buildT2IAdapterMetadata = (t2iAdapter: T2IAdapterConfig): S['T2IAdapterMetadataField'] => { +const buildT2IAdapterMetadata = (t2iAdapter: T2IAdapterConfigV2): S['T2IAdapterMetadataField'] => { const { beginEndStepPct, image, model, processedImage, processorConfig, weight } = t2iAdapter; assert(model, 'T2I Adapter model is required'); @@ -205,7 +205,7 @@ const addT2IAdapterCollectorSafe = (graph: NonNullableGraph, denoiseNodeId: stri }; const addGlobalT2IAdaptersToGraph = async ( - t2iAdapters: T2IAdapterConfig[], + t2iAdapters: T2IAdapterConfigV2[], graph: NonNullableGraph, denoiseNodeId: string ) => { @@ -249,7 +249,7 @@ const addGlobalT2IAdaptersToGraph = async ( upsertMetadata(graph, { t2iAdapters: t2iAdapterMetadata }); }; -const buildIPAdapterMetadata = (ipAdapter: IPAdapterConfig): S['IPAdapterMetadataField'] => { +const buildIPAdapterMetadata = (ipAdapter: IPAdapterConfigV2): S['IPAdapterMetadataField'] => { const { weight, model, clipVisionModel, method, beginEndStepPct, image } = ipAdapter; assert(model, 'IP Adapter model is required'); @@ -290,7 +290,7 @@ const addIPAdapterCollectorSafe = (graph: NonNullableGraph, denoiseNodeId: strin }; const addGlobalIPAdaptersToGraph = async ( - ipAdapters: IPAdapterConfig[], + ipAdapters: IPAdapterConfigV2[], graph: NonNullableGraph, denoiseNodeId: string ) => { @@ -351,7 +351,7 @@ export const addControlLayersToGraph = async (state: RootState, graph: NonNullab // We want the CAs themselves .map((l) => l.controlAdapter) // Must be a ControlNet - .filter(isControlNetConfig) + .filter(isControlNetConfigV2) .filter((ca) => { const hasModel = Boolean(ca.model); const modelMatchesBase = ca.model?.base === mainModel.base; @@ -368,7 +368,7 @@ export const addControlLayersToGraph = async (state: RootState, graph: NonNullab // We want the CAs themselves .map((l) => l.controlAdapter) // Must have a ControlNet CA - .filter(isT2IAdapterConfig) + .filter(isT2IAdapterConfigV2) .filter((ca) => { const hasModel = Boolean(ca.model); const modelMatchesBase = ca.model?.base === mainModel.base; @@ -633,7 +633,7 @@ export const addControlLayersToGraph = async (state: RootState, graph: NonNullab } // TODO(psyche): For some reason, I have to explicitly annotate regionalIPAdapters here. Not sure why. 
- const regionalIPAdapters: IPAdapterConfig[] = layer.ipAdapters.filter((ipAdapter) => { + const regionalIPAdapters: IPAdapterConfigV2[] = layer.ipAdapters.filter((ipAdapter) => { const hasModel = Boolean(ipAdapter.model); const modelMatchesBase = ipAdapter.model?.base === mainModel.base; const hasControlImage = Boolean(ipAdapter.image);