From 9e3412d77671d7bc2781fb7b21116106985aa189 Mon Sep 17 00:00:00 2001 From: Mary Hipp Date: Thu, 18 Jul 2024 20:26:17 -0400 Subject: [PATCH] translations and lint fix --- invokeai/frontend/web/public/locales/en.json | 17 + .../middleware/listenerMiddleware/index.ts | 2 +- .../listeners/enqueueRequestedUpscale.ts | 4 +- .../listeners/imageUploaded.ts | 2 +- .../listeners/modelsLoaded.ts | 14 +- invokeai/frontend/web/src/app/store/store.ts | 4 +- .../src/common/hooks/useIsReadyToEnqueue.ts | 22 +- .../web/src/features/dnd/types/index.ts | 12 +- .../SingleSelectionMenuItems.tsx | 2 +- .../graph/buildMultidiffusionUpscaleGraph.ts | 377 +++++++++--------- .../features/nodes/util/graph/constants.ts | 4 +- .../components/Upscale/ParamCreativity.tsx | 4 +- .../components/Upscale/ParamSharpness.tsx | 4 +- .../components/Upscale/ParamSpandrelModel.tsx | 2 +- .../components/Upscale/ParamStructure.tsx | 4 +- .../Upscale/ParamTiledVAEToggle.tsx | 8 +- .../parameters/hooks/useIsAllowedToUpscale.ts | 1 - .../src/features/parameters/store/types.ts | 2 +- .../features/parameters/store/upscaleSlice.ts | 16 +- .../MultidiffusionWarning.tsx | 33 +- .../UpscaleSettingsAccordion.tsx | 2 +- .../UpscaleSizeDetails.tsx | 8 +- 22 files changed, 293 insertions(+), 251 deletions(-) diff --git a/invokeai/frontend/web/public/locales/en.json b/invokeai/frontend/web/public/locales/en.json index 91a36bcee3..1430c924d5 100644 --- a/invokeai/frontend/web/public/locales/en.json +++ b/invokeai/frontend/web/public/locales/en.json @@ -1027,6 +1027,7 @@ "imageActions": "Image Actions", "sendToImg2Img": "Send to Image to Image", "sendToUnifiedCanvas": "Send To Unified Canvas", + "sendToUpscale": "Send To Upscale", "showOptionsPanel": "Show Side Panel (O or T)", "shuffle": "Shuffle Seed", "steps": "Steps", @@ -1640,6 +1641,22 @@ "layers_one": "Layer", "layers_other": "Layers" }, + "upscaling": { + "creativity": "Creativity", + "currentImageSize": "Current Image Size", + "outputImageSize": "Output Image Size", + "sharpness": "Sharpness", + "structure": "Structure", + "tiledVAE": "Tiled VAE", + "toInstall": "to install", + "upscaleModel": "Upscale Model", + "visit": "Visit", + "warningNoMainModel": "a model", + "warningNoTile": "a {{base_model}} tile controlnet required by this feature", + "warningNoTileOrUpscaleModel": "an upscaler model and {{base_model}} tile controlnet required by this feature", + "warningNoUpscaleModel": "an upscaler model required by this feature", + "x": "x" + }, "ui": { "tabs": { "generation": "Generation", diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts index e53baf5cbb..aad9a2a289 100644 --- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts +++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts @@ -86,7 +86,7 @@ addGalleryOffsetChangedListener(startAppListening); addEnqueueRequestedCanvasListener(startAppListening); addEnqueueRequestedNodes(startAppListening); addEnqueueRequestedLinear(startAppListening); -addEnqueueRequestedUpscale(startAppListening) +addEnqueueRequestedUpscale(startAppListening); addAnyEnqueuedListener(startAppListening); addBatchEnqueuedListener(startAppListening); diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/enqueueRequestedUpscale.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/enqueueRequestedUpscale.ts index 6c3831208d..f51f9e7564 100644 
--- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/enqueueRequestedUpscale.ts +++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/enqueueRequestedUpscale.ts @@ -10,13 +10,11 @@ export const addEnqueueRequestedUpscale = (startAppListening: AppStartListening) predicate: (action): action is ReturnType => enqueueRequested.match(action) && action.payload.tabName === 'upscaling', effect: async (action, { getState, dispatch }) => { - const state = getState(); const { shouldShowProgressInViewer } = state.ui; const { prepend } = action.payload; - - const graph = await buildMultidiffusionUpscsaleGraph(state) + const graph = await buildMultidiffusionUpscsaleGraph(state); const batchConfig = prepareLinearUIBatch(state, graph, prepend); diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageUploaded.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageUploaded.ts index 8ffe0f2633..1aa47345e1 100644 --- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageUploaded.ts +++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageUploaded.ts @@ -94,7 +94,7 @@ export const addImageUploadedFulfilledListener = (startAppListening: AppStartLis dispatch(upscaleInitialImageChanged(imageDTO)); toast({ ...DEFAULT_UPLOADED_TOAST, - description: "set as upscale initial image", + description: 'set as upscale initial image', }); return; } diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/modelsLoaded.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/modelsLoaded.ts index fd9c80d563..2ace69c54e 100644 --- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/modelsLoaded.ts +++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/modelsLoaded.ts @@ -18,7 +18,12 @@ import { forEach } from 'lodash-es'; import type { Logger } from 'roarr'; import { modelConfigsAdapterSelectors, modelsApi } from 'services/api/endpoints/models'; import type { AnyModelConfig } from 'services/api/types'; -import { isNonRefinerMainModelConfig, isRefinerMainModelModelConfig, isSpandrelImageToImageModelConfig, isVAEModelConfig } from 'services/api/types'; +import { + isNonRefinerMainModelConfig, + isRefinerMainModelModelConfig, + isSpandrelImageToImageModelConfig, + isVAEModelConfig, +} from 'services/api/types'; export const addModelsLoadedListener = (startAppListening: AppStartListening) => { startAppListening({ @@ -193,10 +198,9 @@ const handleSpandrelImageToImageModels: ModelHandler = (models, state, dispatch, const firstModel = upscaleModels[0]; if (firstModel) { - dispatch(upscaleModelChanged(firstModel)) - return + dispatch(upscaleModelChanged(firstModel)); + return; } - dispatch(upscaleModelChanged(null)) - + dispatch(upscaleModelChanged(null)); }; diff --git a/invokeai/frontend/web/src/app/store/store.ts b/invokeai/frontend/web/src/app/store/store.ts index d73e67d635..1a4093dfc5 100644 --- a/invokeai/frontend/web/src/app/store/store.ts +++ b/invokeai/frontend/web/src/app/store/store.ts @@ -70,7 +70,7 @@ const allReducers = { [controlLayersSlice.name]: undoable(controlLayersSlice.reducer, controlLayersUndoableConfig), [workflowSettingsSlice.name]: workflowSettingsSlice.reducer, [api.reducerPath]: api.reducer, - [upscaleSlice.name]: upscaleSlice.reducer + [upscaleSlice.name]: upscaleSlice.reducer, }; const rootReducer = 
combineReducers(allReducers); @@ -116,7 +116,7 @@ const persistConfigs: { [key in keyof typeof allReducers]?: PersistConfig } = { [hrfPersistConfig.name]: hrfPersistConfig, [controlLayersPersistConfig.name]: controlLayersPersistConfig, [workflowSettingsPersistConfig.name]: workflowSettingsPersistConfig, - [upscalePersistConfig.name]: upscalePersistConfig + [upscalePersistConfig.name]: upscalePersistConfig, }; const unserialize: UnserializeFunction = (data, key) => { diff --git a/invokeai/frontend/web/src/common/hooks/useIsReadyToEnqueue.ts b/invokeai/frontend/web/src/common/hooks/useIsReadyToEnqueue.ts index e61361e29a..9c465bb3cc 100644 --- a/invokeai/frontend/web/src/common/hooks/useIsReadyToEnqueue.ts +++ b/invokeai/frontend/web/src/common/hooks/useIsReadyToEnqueue.ts @@ -41,9 +41,19 @@ const createSelector = (templates: Templates) => selectDynamicPromptsSlice, selectControlLayersSlice, activeTabNameSelector, - selectUpscalelice + selectUpscalelice, ], - (controlAdapters, generation, system, nodes, workflowSettings, dynamicPrompts, controlLayers, activeTabName, upscale) => { + ( + controlAdapters, + generation, + system, + nodes, + workflowSettings, + dynamicPrompts, + controlLayers, + activeTabName, + upscale + ) => { const { model } = generation; const { size } = controlLayers.present; const { positivePrompt } = controlLayers.present; @@ -196,16 +206,16 @@ const createSelector = (templates: Templates) => reasons.push({ prefix, content }); } }); - } else if (activeTabName === "upscaling") { + } else if (activeTabName === 'upscaling') { if (!upscale.upscaleInitialImage) { - reasons.push({ content: "No Initial image" }) + reasons.push({ content: 'No Initial image' }); } if (!upscale.upscaleModel) { - reasons.push({ content: "No upscale model selected" }) + reasons.push({ content: 'No upscale model selected' }); } if (!upscale.tileControlnetModel) { - reasons.push({ content: "No valid tile controlnet available" }) + reasons.push({ content: 'No valid tile controlnet available' }); } } else { // Handling for all other tabs diff --git a/invokeai/frontend/web/src/features/dnd/types/index.ts b/invokeai/frontend/web/src/features/dnd/types/index.ts index 1e72123b75..c6b9f784f0 100644 --- a/invokeai/frontend/web/src/features/dnd/types/index.ts +++ b/invokeai/frontend/web/src/features/dnd/types/index.ts @@ -91,8 +91,6 @@ export type SelectForCompareDropData = BaseDropData & { }; }; - - export type TypesafeDroppableData = | CurrentImageDropData | ControlAdapterDropData @@ -166,11 +164,11 @@ interface DragEvent { over: TypesafeOver | null; } -export interface DragStartEvent extends Pick { } -interface DragMoveEvent extends DragEvent { } -interface DragOverEvent extends DragMoveEvent { } -export interface DragEndEvent extends DragEvent { } -interface DragCancelEvent extends DragEndEvent { } +export interface DragStartEvent extends Pick {} +interface DragMoveEvent extends DragEvent {} +interface DragOverEvent extends DragMoveEvent {} +export interface DragEndEvent extends DragEvent {} +interface DragCancelEvent extends DragEndEvent {} export interface DndContextTypesafeProps extends Omit { diff --git a/invokeai/frontend/web/src/features/gallery/components/ImageContextMenu/SingleSelectionMenuItems.tsx b/invokeai/frontend/web/src/features/gallery/components/ImageContextMenu/SingleSelectionMenuItems.tsx index d0084ab416..ab12684c11 100644 --- a/invokeai/frontend/web/src/features/gallery/components/ImageContextMenu/SingleSelectionMenuItems.tsx +++ 
b/invokeai/frontend/web/src/features/gallery/components/ImageContextMenu/SingleSelectionMenuItems.tsx @@ -192,7 +192,7 @@ const SingleSelectionMenuItems = (props: SingleSelectionMenuItemsProps) => { )} } onClickCapture={handleSendToUpscale} id="send-to-upscale"> - Send to upscale + {t('parameters.sendToUpscale')} } onClickCapture={handleChangeBoard}> diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts index 839caf9095..77c757bb57 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts @@ -4,223 +4,226 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph'; import { isParamESRGANModelName } from 'features/parameters/store/postprocessingSlice'; import { assert } from 'tsafe'; -import { CLIP_SKIP, CONTROL_NET_COLLECT, ESRGAN, IMAGE_TO_LATENTS, LATENTS_TO_IMAGE, MAIN_MODEL_LOADER, NEGATIVE_CONDITIONING, NOISE, POSITIVE_CONDITIONING, RESIZE, SDXL_MODEL_LOADER, TILED_MULTI_DIFFUSION_DENOISE_LATENTS, UNSHARP_MASK, VAE_LOADER } from './constants'; +import { + CLIP_SKIP, + CONTROL_NET_COLLECT, + ESRGAN, + IMAGE_TO_LATENTS, + LATENTS_TO_IMAGE, + MAIN_MODEL_LOADER, + NEGATIVE_CONDITIONING, + NOISE, + POSITIVE_CONDITIONING, + RESIZE, + SDXL_MODEL_LOADER, + TILED_MULTI_DIFFUSION_DENOISE_LATENTS, + UNSHARP_MASK, + VAE_LOADER, +} from './constants'; import { addLoRAs } from './generation/addLoRAs'; import { addSDXLLoRas } from './generation/addSDXLLoRAs'; import { getBoardField, getSDXLStylePrompts } from './graphBuilderUtils'; - export const buildMultidiffusionUpscsaleGraph = async (state: RootState): Promise => { - const { - model, - cfgScale: cfg_scale, - scheduler, - steps, - vaePrecision, - seed, - vae, - } = state.generation; - const { positivePrompt, negativePrompt } = state.controlLayers.present; - const { upscaleModel, upscaleInitialImage, sharpness, structure, creativity, tiledVAE, scale, tileControlnetModel } = state.upscale; + const { model, cfgScale: cfg_scale, scheduler, steps, vaePrecision, seed, vae } = state.generation; + const { positivePrompt, negativePrompt } = state.controlLayers.present; + const { upscaleModel, upscaleInitialImage, sharpness, structure, creativity, tiledVAE, scale, tileControlnetModel } = + state.upscale; - assert(model, 'No model found in state'); - assert(upscaleModel, 'No upscale model found in state'); - assert(upscaleInitialImage, 'No initial image found in state'); - assert(isParamESRGANModelName(upscaleModel.name), "Model must be valid upscale model") - assert(scale, 'Scale is required') - assert(tileControlnetModel, "Tile controlnet is required") + assert(model, 'No model found in state'); + assert(upscaleModel, 'No upscale model found in state'); + assert(upscaleInitialImage, 'No initial image found in state'); + assert(isParamESRGANModelName(upscaleModel.name), 'Model must be valid upscale model'); + assert(scale, 'Scale is required'); + assert(tileControlnetModel, 'Tile controlnet is required'); - const g = new Graph() + const g = new Graph(); - const unsharpMaskNode1 = g.addNode({ - id: `${UNSHARP_MASK}_1`, - type: 'unsharp_mask', - image: upscaleInitialImage, - radius: 2, - strength: ((sharpness + 10) * 3.75) + 25 - }) + const unsharpMaskNode1 = g.addNode({ + id: `${UNSHARP_MASK}_1`, + type: 'unsharp_mask', + image: upscaleInitialImage, + radius: 2, + strength: 
(sharpness + 10) * 3.75 + 25, + }); - const upscaleNode = g.addNode({ - id: ESRGAN, - type: 'esrgan', - model_name: upscaleModel.name, - tile_size: 500 - }) + const upscaleNode = g.addNode({ + id: ESRGAN, + type: 'esrgan', + model_name: upscaleModel.name, + tile_size: 500, + }); - g.addEdge(unsharpMaskNode1, 'image', upscaleNode, 'image') + g.addEdge(unsharpMaskNode1, 'image', upscaleNode, 'image'); - const unsharpMaskNode2 = g.addNode({ - id: `${UNSHARP_MASK}_2`, - type: 'unsharp_mask', - radius: 2, - strength: 50 - }) + const unsharpMaskNode2 = g.addNode({ + id: `${UNSHARP_MASK}_2`, + type: 'unsharp_mask', + radius: 2, + strength: 50, + }); - g.addEdge(upscaleNode, 'image', unsharpMaskNode2, 'image',) + g.addEdge(upscaleNode, 'image', unsharpMaskNode2, 'image'); - const resizeNode = g.addNode({ - id: RESIZE, - type: 'img_resize', - width: ((upscaleInitialImage.width * scale) / 8) * 8, - height: ((upscaleInitialImage.height * scale) / 8) * 8, - resample_mode: "lanczos", - }) + const resizeNode = g.addNode({ + id: RESIZE, + type: 'img_resize', + width: ((upscaleInitialImage.width * scale) / 8) * 8, + height: ((upscaleInitialImage.height * scale) / 8) * 8, + resample_mode: 'lanczos', + }); - g.addEdge(unsharpMaskNode2, 'image', resizeNode, "image") + g.addEdge(unsharpMaskNode2, 'image', resizeNode, 'image'); - const noiseNode = g.addNode({ - id: NOISE, - type: "noise", - seed, - }) + const noiseNode = g.addNode({ + id: NOISE, + type: 'noise', + seed, + }); - g.addEdge(resizeNode, 'width', noiseNode, "width") - g.addEdge(resizeNode, 'height', noiseNode, "height") + g.addEdge(resizeNode, 'width', noiseNode, 'width'); + g.addEdge(resizeNode, 'height', noiseNode, 'height'); - const i2lNode = g.addNode({ - id: IMAGE_TO_LATENTS, - type: "i2l", - fp32: vaePrecision === "fp32", - tiled: tiledVAE - }) + const i2lNode = g.addNode({ + id: IMAGE_TO_LATENTS, + type: 'i2l', + fp32: vaePrecision === 'fp32', + tiled: tiledVAE, + }); - g.addEdge(resizeNode, 'image', i2lNode, "image") + g.addEdge(resizeNode, 'image', i2lNode, 'image'); - const l2iNode = g.addNode({ - type: "l2i", - id: LATENTS_TO_IMAGE, - fp32: vaePrecision === "fp32", - tiled: tiledVAE, - board: getBoardField(state), - is_intermediate: false, + const l2iNode = g.addNode({ + type: 'l2i', + id: LATENTS_TO_IMAGE, + fp32: vaePrecision === 'fp32', + tiled: tiledVAE, + board: getBoardField(state), + is_intermediate: false, + }); - }) + const tiledMultidiffusionNode = g.addNode({ + id: TILED_MULTI_DIFFUSION_DENOISE_LATENTS, + type: 'tiled_multi_diffusion_denoise_latents', + tile_height: 1024, // is this dependent on base model + tile_width: 1024, // is this dependent on base model + tile_overlap: 128, + steps, + cfg_scale, + scheduler, + denoising_start: ((creativity * -1 + 10) * 4.99) / 100, + denoising_end: 1, + }); - const tiledMultidiffusionNode = g.addNode({ - id: TILED_MULTI_DIFFUSION_DENOISE_LATENTS, - type: 'tiled_multi_diffusion_denoise_latents', - tile_height: 1024, // is this dependent on base model - tile_width: 1024, // is this dependent on base model - tile_overlap: 128, - steps, - cfg_scale, - scheduler, - denoising_start: (((creativity * -1) + 10) * 4.99) / 100, - denoising_end: 1 + let posCondNode; + let negCondNode; + let modelNode; + + if (model.base === 'sdxl') { + const { positiveStylePrompt, negativeStylePrompt } = getSDXLStylePrompts(state); + + posCondNode = g.addNode({ + type: 'sdxl_compel_prompt', + id: POSITIVE_CONDITIONING, + prompt: positivePrompt, + style: positiveStylePrompt, + }); + negCondNode = g.addNode({ + type: 
'sdxl_compel_prompt', + id: NEGATIVE_CONDITIONING, + prompt: negativePrompt, + style: negativeStylePrompt, + }); + modelNode = g.addNode({ + type: 'sdxl_model_loader', + id: SDXL_MODEL_LOADER, + model, + }); + g.addEdge(modelNode, 'clip', posCondNode, 'clip'); + g.addEdge(modelNode, 'clip', negCondNode, 'clip'); + g.addEdge(modelNode, 'clip2', posCondNode, 'clip2'); + g.addEdge(modelNode, 'clip2', negCondNode, 'clip2'); + addSDXLLoRas(state, g, tiledMultidiffusionNode, modelNode, null, posCondNode, negCondNode); + } else { + posCondNode = g.addNode({ + type: 'compel', + id: POSITIVE_CONDITIONING, + prompt: positivePrompt, + }); + negCondNode = g.addNode({ + type: 'compel', + id: NEGATIVE_CONDITIONING, + prompt: negativePrompt, + }); + modelNode = g.addNode({ + type: 'main_model_loader', + id: MAIN_MODEL_LOADER, + model, + }); + const clipSkipNode = g.addNode({ + type: 'clip_skip', + id: CLIP_SKIP, }); - let posCondNode; let negCondNode; let modelNode; + g.addEdge(modelNode, 'clip', clipSkipNode, 'clip'); + g.addEdge(clipSkipNode, 'clip', posCondNode, 'clip'); + g.addEdge(clipSkipNode, 'clip', negCondNode, 'clip'); + addLoRAs(state, g, tiledMultidiffusionNode, modelNode, null, clipSkipNode, posCondNode, negCondNode); + } - if (model.base === "sdxl") { - const { positiveStylePrompt, negativeStylePrompt } = getSDXLStylePrompts(state); + let vaeNode; + if (vae) { + vaeNode = g.addNode({ + id: VAE_LOADER, + type: 'vae_loader', + vae_model: vae, + }); + } - posCondNode = g.addNode({ - type: 'sdxl_compel_prompt', - id: POSITIVE_CONDITIONING, - prompt: positivePrompt, - style: positiveStylePrompt - }); - negCondNode = g.addNode({ - type: 'sdxl_compel_prompt', - id: NEGATIVE_CONDITIONING, - prompt: negativePrompt, - style: negativeStylePrompt - }); - modelNode = g.addNode({ - type: 'sdxl_model_loader', - id: SDXL_MODEL_LOADER, - model, - }); - g.addEdge(modelNode, 'clip', posCondNode, 'clip'); - g.addEdge(modelNode, 'clip', negCondNode, 'clip'); - g.addEdge(modelNode, 'clip2', posCondNode, 'clip2'); - g.addEdge(modelNode, 'clip2', negCondNode, 'clip2'); - addSDXLLoRas(state, g, tiledMultidiffusionNode, modelNode, null, posCondNode, negCondNode); - } else { - posCondNode = g.addNode({ - type: 'compel', - id: POSITIVE_CONDITIONING, - prompt: positivePrompt, - }); - negCondNode = g.addNode({ - type: 'compel', - id: NEGATIVE_CONDITIONING, - prompt: negativePrompt, - }); - modelNode = g.addNode({ - type: 'main_model_loader', - id: MAIN_MODEL_LOADER, - model, - }); - const clipSkipNode = g.addNode({ - type: 'clip_skip', - id: CLIP_SKIP, - }); + g.addEdge(vaeNode || modelNode, 'vae', i2lNode, 'vae'); + g.addEdge(vaeNode || modelNode, 'vae', l2iNode, 'vae'); - g.addEdge(modelNode, 'clip', clipSkipNode, 'clip'); - g.addEdge(clipSkipNode, 'clip', posCondNode, 'clip'); - g.addEdge(clipSkipNode, 'clip', negCondNode, 'clip'); - addLoRAs(state, g, tiledMultidiffusionNode, modelNode, null, clipSkipNode, posCondNode, negCondNode); - } + g.addEdge(noiseNode, 'noise', tiledMultidiffusionNode, 'noise'); + g.addEdge(i2lNode, 'latents', tiledMultidiffusionNode, 'latents'); + g.addEdge(posCondNode, 'conditioning', tiledMultidiffusionNode, 'positive_conditioning'); + g.addEdge(negCondNode, 'conditioning', tiledMultidiffusionNode, 'negative_conditioning'); + g.addEdge(modelNode, 'unet', tiledMultidiffusionNode, 'unet'); + g.addEdge(tiledMultidiffusionNode, 'latents', l2iNode, 'latents'); + const controlnetNode1 = g.addNode({ + id: 'controlnet_1', + type: 'controlnet', + control_model: tileControlnetModel, + control_mode: 
'balanced', + resize_mode: 'just_resize', + control_weight: ((structure + 10) * 0.025 + 0.3) * 0.013 + 0.35, + begin_step_percent: 0, + end_step_percent: (structure + 10) * 0.025 + 0.3, + }); - let vaeNode; - if (vae) { - vaeNode = g.addNode({ - id: VAE_LOADER, - type: "vae_loader", - vae_model: vae - }) - } + g.addEdge(resizeNode, 'image', controlnetNode1, 'image'); - g.addEdge(vaeNode || modelNode, "vae", i2lNode, "vae") - g.addEdge(vaeNode || modelNode, "vae", l2iNode, "vae") + const controlnetNode2 = g.addNode({ + id: 'controlnet_2', + type: 'controlnet', + control_model: tileControlnetModel, + control_mode: 'balanced', + resize_mode: 'just_resize', + control_weight: ((structure + 10) * 0.025 + 0.3) * 0.013, + begin_step_percent: (structure + 10) * 0.025 + 0.3, + end_step_percent: 0.8, + }); + g.addEdge(resizeNode, 'image', controlnetNode2, 'image'); - g.addEdge(noiseNode, "noise", tiledMultidiffusionNode, "noise") - g.addEdge(i2lNode, "latents", tiledMultidiffusionNode, "latents") - g.addEdge(posCondNode, 'conditioning', tiledMultidiffusionNode, 'positive_conditioning'); - g.addEdge(negCondNode, 'conditioning', tiledMultidiffusionNode, 'negative_conditioning'); - g.addEdge(modelNode, "unet", tiledMultidiffusionNode, "unet") - g.addEdge(tiledMultidiffusionNode, "latents", l2iNode, "latents") + const collectNode = g.addNode({ + id: CONTROL_NET_COLLECT, + type: 'collect', + }); + g.addEdge(controlnetNode1, 'control', collectNode, 'item'); + g.addEdge(controlnetNode2, 'control', collectNode, 'item'); + g.addEdge(collectNode, 'collection', tiledMultidiffusionNode, 'control'); - const controlnetNode1 = g.addNode({ - id: 'controlnet_1', - type: "controlnet", - control_model: tileControlnetModel, - control_mode: "balanced", - resize_mode: "just_resize", - control_weight: ((((structure + 10) * 0.025) + 0.3) * 0.013) + 0.35, - begin_step_percent: 0, - end_step_percent: ((structure + 10) * 0.025) + 0.3 - }) - - g.addEdge(resizeNode, "image", controlnetNode1, "image") - - const controlnetNode2 = g.addNode({ - id: "controlnet_2", - type: "controlnet", - control_model: tileControlnetModel, - control_mode: "balanced", - resize_mode: "just_resize", - control_weight: (((structure + 10) * 0.025) + 0.3) * 0.013, - begin_step_percent: ((structure + 10) * 0.025) + 0.3, - end_step_percent: 0.8 - }) - - g.addEdge(resizeNode, "image", controlnetNode2, "image") - - const collectNode = g.addNode({ - id: CONTROL_NET_COLLECT, - type: "collect", - }) - g.addEdge(controlnetNode1, "control", collectNode, "item") - g.addEdge(controlnetNode2, "control", collectNode, "item") - - g.addEdge(collectNode, "collection", tiledMultidiffusionNode, "control") - - - return g.getGraph(); - -} \ No newline at end of file + return g.getGraph(); +}; diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/constants.ts b/invokeai/frontend/web/src/features/nodes/util/graph/constants.ts index e7d62897ef..b08ed5365a 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/constants.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/constants.ts @@ -53,8 +53,8 @@ export const PROMPT_REGION_NEGATIVE_COND_PREFIX = 'prompt_region_negative_cond'; export const PROMPT_REGION_POSITIVE_COND_INVERTED_PREFIX = 'prompt_region_positive_cond_inverted'; export const POSITIVE_CONDITIONING_COLLECT = 'positive_conditioning_collect'; export const NEGATIVE_CONDITIONING_COLLECT = 'negative_conditioning_collect'; -export const UNSHARP_MASK = 'unsharp_mask' -export const TILED_MULTI_DIFFUSION_DENOISE_LATENTS = 
"tiled_multi_diffusion_denoise_latents" +export const UNSHARP_MASK = 'unsharp_mask'; +export const TILED_MULTI_DIFFUSION_DENOISE_LATENTS = 'tiled_multi_diffusion_denoise_latents'; // friendly graph ids export const CONTROL_LAYERS_GRAPH = 'control_layers_graph'; diff --git a/invokeai/frontend/web/src/features/parameters/components/Upscale/ParamCreativity.tsx b/invokeai/frontend/web/src/features/parameters/components/Upscale/ParamCreativity.tsx index 3600370fa0..71201912ce 100644 --- a/invokeai/frontend/web/src/features/parameters/components/Upscale/ParamCreativity.tsx +++ b/invokeai/frontend/web/src/features/parameters/components/Upscale/ParamCreativity.tsx @@ -1,8 +1,8 @@ import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library'; import { useAppDispatch, useAppSelector } from 'app/store/storeHooks'; +import { creativityChanged } from 'features/parameters/store/upscaleSlice'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; -import { creativityChanged } from '../../store/upscaleSlice'; const ParamCreativity = () => { const creativity = useAppSelector((s) => s.upscale.creativity); @@ -25,7 +25,7 @@ const ParamCreativity = () => { return ( - Creativity + {t('upscaling.creativity')} { const sharpness = useAppSelector((s) => s.upscale.sharpness); @@ -25,7 +25,7 @@ const ParamSharpness = () => { return ( - Sharpness + {t('upscaling.sharpness')} { return ( - Upscale Model + {t('upscaling.upscaleModel')} { const structure = useAppSelector((s) => s.upscale.structure); @@ -25,7 +25,7 @@ const ParamStructure = () => { return ( - Structure + {t('upscaling.structure')} { + const { t } = useTranslation(); const tiledVAE = useAppSelector((s) => s.upscale.tiledVAE); const dispatch = useAppDispatch(); @@ -16,7 +18,7 @@ export const ParamTiledVAEToggle = () => { return ( - Tiled VAE + {t('upscaling.tiledVAE')} ); }; diff --git a/invokeai/frontend/web/src/features/parameters/hooks/useIsAllowedToUpscale.ts b/invokeai/frontend/web/src/features/parameters/hooks/useIsAllowedToUpscale.ts index 7cae5ee630..95fc52df22 100644 --- a/invokeai/frontend/web/src/features/parameters/hooks/useIsAllowedToUpscale.ts +++ b/invokeai/frontend/web/src/features/parameters/hooks/useIsAllowedToUpscale.ts @@ -6,7 +6,6 @@ import { useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import type { ImageDTO } from 'services/api/types'; - const getUpscaledPixels = (imageDTO?: ImageDTO, maxUpscalePixels?: number) => { if (!imageDTO) { return; diff --git a/invokeai/frontend/web/src/features/parameters/store/types.ts b/invokeai/frontend/web/src/features/parameters/store/types.ts index 86349019d8..51ab6146cf 100644 --- a/invokeai/frontend/web/src/features/parameters/store/types.ts +++ b/invokeai/frontend/web/src/features/parameters/store/types.ts @@ -10,7 +10,7 @@ import type { ParameterSeed, ParameterSteps, ParameterStrength, - ParameterVAEModel + ParameterVAEModel, } from 'features/parameters/types/parameterSchemas'; import type { RgbaColor } from 'react-colorful'; diff --git a/invokeai/frontend/web/src/features/parameters/store/upscaleSlice.ts b/invokeai/frontend/web/src/features/parameters/store/upscaleSlice.ts index 58f60290a4..d39ec24a3d 100644 --- a/invokeai/frontend/web/src/features/parameters/store/upscaleSlice.ts +++ b/invokeai/frontend/web/src/features/parameters/store/upscaleSlice.ts @@ -4,7 +4,6 @@ import type { PersistConfig, RootState } from 'app/store/store'; import type { ParameterSpandrelImageToImageModel } 
from 'features/parameters/types/parameterSchemas'; import type { ControlNetModelConfig, ImageDTO } from 'services/api/types'; - interface UpscaleState { _version: 1; upscaleModel: ParameterSpandrelImageToImageModel | null; @@ -14,7 +13,7 @@ interface UpscaleState { creativity: number; tiledVAE: boolean; scale: number | null; - tileControlnetModel: ControlNetModelConfig | null + tileControlnetModel: ControlNetModelConfig | null; } const initialUpscaleState: UpscaleState = { @@ -26,7 +25,7 @@ const initialUpscaleState: UpscaleState = { creativity: 0, tiledVAE: false, scale: null, - tileControlnetModel: null + tileControlnetModel: null, }; export const upscaleSlice = createSlice({ @@ -68,7 +67,16 @@ export const upscaleSlice = createSlice({ }, }); -export const { upscaleModelChanged, upscaleInitialImageChanged, tiledVAEChanged, structureChanged, creativityChanged, sharpnessChanged, scaleChanged, tileControlnetModelChanged } = upscaleSlice.actions; +export const { + upscaleModelChanged, + upscaleInitialImageChanged, + tiledVAEChanged, + structureChanged, + creativityChanged, + sharpnessChanged, + scaleChanged, + tileControlnetModelChanged, +} = upscaleSlice.actions; export const selectUpscalelice = (state: RootState) => state.upscale; diff --git a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/MultidiffusionWarning.tsx b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/MultidiffusionWarning.tsx index 99b8037b27..84b8432aec 100644 --- a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/MultidiffusionWarning.tsx +++ b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/MultidiffusionWarning.tsx @@ -1,12 +1,14 @@ import { Flex, Link, Text } from '@invoke-ai/ui-library'; -import { useAppDispatch, useAppSelector } from '../../../../app/store/storeHooks'; -import { useControlNetModels } from '../../../../services/api/hooks/modelsByType'; +import { useAppDispatch, useAppSelector } from 'app/store/storeHooks'; +import { tileControlnetModelChanged } from 'features/parameters/store/upscaleSlice'; +import { MODEL_TYPE_SHORT_MAP } from 'features/parameters/types/constants'; +import { setActiveTab } from 'features/ui/store/uiSlice'; import { useCallback, useEffect, useMemo } from 'react'; -import { tileControlnetModelChanged } from '../../../parameters/store/upscaleSlice'; -import { MODEL_TYPE_SHORT_MAP } from '../../../parameters/types/constants'; -import { setActiveTab } from '../../../ui/store/uiSlice'; +import { useTranslation } from 'react-i18next'; +import { useControlNetModels } from 'services/api/hooks/modelsByType'; export const MultidiffusionWarning = () => { + const { t } = useTranslation(); const model = useAppSelector((s) => s.generation.model); const { tileControlnetModel, upscaleModel } = useAppSelector((s) => s.upscale); const dispatch = useAppDispatch(); @@ -23,19 +25,18 @@ export const MultidiffusionWarning = () => { const warningText = useMemo(() => { if (!model) { - return `a model`; + return t('upscaling.warningNoMainModel'); } - if (!upscaleModel && !tileControlnetModel) { - return `an upscaler model and ${MODEL_TYPE_SHORT_MAP[model.base]} tile controlnet`; + return t('upscaling.warningNoTileOrUpscaleModel', { base_model: MODEL_TYPE_SHORT_MAP[model.base] }); } if (!upscaleModel) { - return 'an upscaler model'; + return t('upscaling.warningNoUpscaleModel'); } if (!tileControlnetModel) { - return `a 
${MODEL_TYPE_SHORT_MAP[model.base]} tile controlnet`; + return t('upscaling.warningNoTile', { base_model: MODEL_TYPE_SHORT_MAP[model.base] }); } - }, [model?.base, upscaleModel, tileControlnetModel]); + }, [model, upscaleModel, tileControlnetModel, t]); const handleGoToModelManager = useCallback(() => { dispatch(setActiveTab('models')); @@ -46,13 +47,13 @@ export const MultidiffusionWarning = () => { } return ( - - - Visit{' '} + + + {t('upscaling.visit')}{' '} - Model Manager + {t('modelManager.modelManager')} {' '} - to install {warningText} required by this feature + {t('upscaling.toInstall')} {warningText}. ); diff --git a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSettingsAccordion.tsx b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSettingsAccordion.tsx index 263cb71d32..eff7c4b3b2 100644 --- a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSettingsAccordion.tsx +++ b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSettingsAccordion.tsx @@ -12,9 +12,9 @@ import { useStandaloneAccordionToggle } from 'features/settingsAccordions/hooks/ import { memo } from 'react'; import { useTranslation } from 'react-i18next'; +import { MultidiffusionWarning } from './MultidiffusionWarning'; import { UpscaleInitialImage } from './UpscaleInitialImage'; import { UpscaleSizeDetails } from './UpscaleSizeDetails'; -import { MultidiffusionWarning } from './MultidiffusionWarning'; const selector = createMemoizedSelector([selectUpscalelice], (upscale) => { const badges: string[] = []; diff --git a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSizeDetails.tsx b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSizeDetails.tsx index f963dad4af..a6f156ec84 100644 --- a/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSizeDetails.tsx +++ b/invokeai/frontend/web/src/features/settingsAccordions/components/UpscaleSettingsAccordion/UpscaleSizeDetails.tsx @@ -1,15 +1,17 @@ import { Flex, Text } from '@invoke-ai/ui-library'; import { useAppSelector } from 'app/store/storeHooks'; import { useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; export const UpscaleSizeDetails = () => { + const { t } = useTranslation(); const { upscaleInitialImage, scale } = useAppSelector((s) => s.upscale); const outputSizeText = useMemo(() => { if (upscaleInitialImage && scale) { - return `Output image size: ${upscaleInitialImage.width * scale} x ${upscaleInitialImage.height * scale}`; + return `${t('upscaling.outputImageSize')}: ${upscaleInitialImage.width * scale} ${t('upscaling.x')} ${upscaleInitialImage.height * scale}`; } - }, [upscaleInitialImage, scale]); + }, [upscaleInitialImage, scale, t]); if (!outputSizeText || !upscaleInitialImage) { return <>; @@ -18,7 +20,7 @@ export const UpscaleSizeDetails = () => { return ( - Current image size: {upscaleInitialImage.width} x {upscaleInitialImage.height} + {t('upscaling.currentImageSize')}: {upscaleInitialImage.width} {t('upscaling.x')} {upscaleInitialImage.height} {outputSizeText}