cleanup: Remove Openpose Image Processor

This commit is contained in:
blessedcoolant 2024-02-11 13:19:09 +05:30 committed by Kent Keirsey
parent f8e566d62a
commit 50b93992cf
9 changed files with 21 additions and 235 deletions

View File

@ -81,7 +81,7 @@ their descriptions.
| ONNX Text to Latents | Generates latents from conditionings. | | ONNX Text to Latents | Generates latents from conditionings. |
| ONNX Model Loader | Loads a main model, outputting its submodels. | | ONNX Model Loader | Loads a main model, outputting its submodels. |
| OpenCV Inpaint | Simple inpaint using opencv. | | OpenCV Inpaint | Simple inpaint using opencv. |
| Openpose Processor | Applies Openpose processing to image | | DWPose Processor | Applies DWPose processing to image |
| PIDI Processor | Applies PIDI processing to image | | PIDI Processor | Applies PIDI processing to image |
| Prompts from File | Loads prompts from a text file | | Prompts from File | Loads prompts from a text file |
| Random Integer | Outputs a single random integer. | | Random Integer | Outputs a single random integer. |

View File

@ -17,7 +17,6 @@ from controlnet_aux import (
MidasDetector, MidasDetector,
MLSDdetector, MLSDdetector,
NormalBaeDetector, NormalBaeDetector,
OpenposeDetector,
PidiNetDetector, PidiNetDetector,
SamDetector, SamDetector,
ZoeDetector, ZoeDetector,
@ -277,31 +276,6 @@ class LineartAnimeImageProcessorInvocation(ImageProcessorInvocation):
return processed_image return processed_image
@invocation(
"openpose_image_processor",
title="Openpose Processor",
tags=["controlnet", "openpose", "pose"],
category="controlnet",
version="1.2.0",
)
class OpenposeImageProcessorInvocation(ImageProcessorInvocation):
"""Applies Openpose processing to image"""
hand_and_face: bool = InputField(default=False, description="Whether to use hands and face mode")
detect_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.detect_res)
image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)
def run_processor(self, image):
openpose_processor = OpenposeDetector.from_pretrained(pretrained_model_or_path="lllyasviel/Annotators")
processed_image = openpose_processor(
image,
detect_resolution=self.detect_resolution,
image_resolution=self.image_resolution,
hand_and_face=self.hand_and_face,
)
return processed_image
@invocation( @invocation(
"midas_depth_image_processor", "midas_depth_image_processor",
title="Midas Depth Processor", title="Midas Depth Processor",

View File

@ -264,8 +264,6 @@
"noneDescription": "No processing applied", "noneDescription": "No processing applied",
"normalBae": "Normal BAE", "normalBae": "Normal BAE",
"normalBaeDescription": "Normal BAE processing", "normalBaeDescription": "Normal BAE processing",
"openPose": "Openpose",
"openPoseDescription": "Human pose estimation using Openpose",
"dwPose": "DWPose", "dwPose": "DWPose",
"dwPoseDescription": "Human pose estimation using DWPose", "dwPoseDescription": "Human pose estimation using DWPose",
"pidi": "PIDI", "pidi": "PIDI",

View File

@ -14,7 +14,6 @@ import MediapipeFaceProcessor from './processors/MediapipeFaceProcessor';
import MidasDepthProcessor from './processors/MidasDepthProcessor'; import MidasDepthProcessor from './processors/MidasDepthProcessor';
import MlsdImageProcessor from './processors/MlsdImageProcessor'; import MlsdImageProcessor from './processors/MlsdImageProcessor';
import NormalBaeProcessor from './processors/NormalBaeProcessor'; import NormalBaeProcessor from './processors/NormalBaeProcessor';
import OpenposeProcessor from './processors/OpenposeProcessor';
import PidiProcessor from './processors/PidiProcessor'; import PidiProcessor from './processors/PidiProcessor';
import ZoeDepthProcessor from './processors/ZoeDepthProcessor'; import ZoeDepthProcessor from './processors/ZoeDepthProcessor';
@ -74,10 +73,6 @@ const ControlAdapterProcessorComponent = ({ id }: Props) => {
return <NormalBaeProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />; return <NormalBaeProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
} }
if (processorNode.type === 'openpose_image_processor') {
return <OpenposeProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
}
if (processorNode.type === 'dwpose_image_processor') { if (processorNode.type === 'dwpose_image_processor') {
return <DWPoseProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />; return <DWPoseProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
} }

View File

@ -1,92 +0,0 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
import type { RequiredOpenposeImageProcessorInvocation } from 'features/controlAdapters/store/types';
import type { ChangeEvent } from 'react';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
const DEFAULTS = CONTROLNET_PROCESSORS.openpose_image_processor.default as RequiredOpenposeImageProcessorInvocation;
type Props = {
controlNetId: string;
processorNode: RequiredOpenposeImageProcessorInvocation;
isEnabled: boolean;
};
const OpenposeProcessor = (props: Props) => {
const { controlNetId, processorNode, isEnabled } = props;
const { image_resolution, detect_resolution, hand_and_face } = processorNode;
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
},
[controlNetId, processorChanged]
);
const handleImageResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { image_resolution: v });
},
[controlNetId, processorChanged]
);
const handleHandAndFaceChanged = useCallback(
(e: ChangeEvent<HTMLInputElement>) => {
processorChanged(controlNetId, { hand_and_face: e.target.checked });
},
[controlNetId, processorChanged]
);
return (
<ProcessorWrapper>
<FormControl isDisabled={!isEnabled}>
<FormLabel>{t('controlnet.detectResolution')}</FormLabel>
<CompositeSlider
value={detect_resolution}
onChange={handleDetectResolutionChanged}
defaultValue={DEFAULTS.detect_resolution}
min={0}
max={4096}
marks
/>
<CompositeNumberInput
value={detect_resolution}
onChange={handleDetectResolutionChanged}
defaultValue={DEFAULTS.detect_resolution}
min={0}
max={4096}
/>
</FormControl>
<FormControl isDisabled={!isEnabled}>
<FormLabel>{t('controlnet.imageResolution')}</FormLabel>
<CompositeSlider
value={image_resolution}
onChange={handleImageResolutionChanged}
defaultValue={DEFAULTS.image_resolution}
min={0}
max={4096}
marks
/>
<CompositeNumberInput
value={image_resolution}
onChange={handleImageResolutionChanged}
defaultValue={DEFAULTS.image_resolution}
min={0}
max={4096}
/>
</FormControl>
<FormControl isDisabled={!isEnabled}>
<FormLabel>{t('controlnet.handAndFace')}</FormLabel>
<Switch isChecked={hand_and_face} onChange={handleHandAndFaceChanged} />
</FormControl>
</ProcessorWrapper>
);
};
export default memo(OpenposeProcessor);

View File

@ -205,22 +205,6 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
image_resolution: 512, image_resolution: 512,
}, },
}, },
openpose_image_processor: {
type: 'openpose_image_processor',
get label() {
return i18n.t('controlnet.openPose');
},
get description() {
return i18n.t('controlnet.openPoseDescription');
},
default: {
id: 'openpose_image_processor',
type: 'openpose_image_processor',
detect_resolution: 512,
image_resolution: 512,
hand_and_face: false,
},
},
dwpose_image_processor: { dwpose_image_processor: {
type: 'dwpose_image_processor', type: 'dwpose_image_processor',
get label() { get label() {

View File

@ -19,7 +19,6 @@ import type {
MidasDepthImageProcessorInvocation, MidasDepthImageProcessorInvocation,
MlsdImageProcessorInvocation, MlsdImageProcessorInvocation,
NormalbaeImageProcessorInvocation, NormalbaeImageProcessorInvocation,
OpenposeImageProcessorInvocation,
PidiImageProcessorInvocation, PidiImageProcessorInvocation,
ZoeDepthImageProcessorInvocation, ZoeDepthImageProcessorInvocation,
} from 'services/api/types'; } from 'services/api/types';
@ -41,7 +40,6 @@ export type ControlAdapterProcessorNode =
| MidasDepthImageProcessorInvocation | MidasDepthImageProcessorInvocation
| MlsdImageProcessorInvocation | MlsdImageProcessorInvocation
| NormalbaeImageProcessorInvocation | NormalbaeImageProcessorInvocation
| OpenposeImageProcessorInvocation
| DWPoseImageProcessorInvocation | DWPoseImageProcessorInvocation
| PidiImageProcessorInvocation | PidiImageProcessorInvocation
| ZoeDepthImageProcessorInvocation; | ZoeDepthImageProcessorInvocation;
@ -144,14 +142,6 @@ export type RequiredNormalbaeImageProcessorInvocation = O.Required<
'type' | 'detect_resolution' | 'image_resolution' 'type' | 'detect_resolution' | 'image_resolution'
>; >;
/**
* The Openpose processor node, with parameters flagged as required
*/
export type RequiredOpenposeImageProcessorInvocation = O.Required<
OpenposeImageProcessorInvocation,
'type' | 'detect_resolution' | 'image_resolution' | 'hand_and_face'
>;
/** /**
* The DWPose processor node, with parameters flagged as required * The DWPose processor node, with parameters flagged as required
*/ */
@ -189,7 +179,6 @@ export type RequiredControlAdapterProcessorNode =
| RequiredMidasDepthImageProcessorInvocation | RequiredMidasDepthImageProcessorInvocation
| RequiredMlsdImageProcessorInvocation | RequiredMlsdImageProcessorInvocation
| RequiredNormalbaeImageProcessorInvocation | RequiredNormalbaeImageProcessorInvocation
| RequiredOpenposeImageProcessorInvocation
| RequiredDWPoseImageProcessorInvocation | RequiredDWPoseImageProcessorInvocation
| RequiredPidiImageProcessorInvocation | RequiredPidiImageProcessorInvocation
| RequiredZoeDepthImageProcessorInvocation, | RequiredZoeDepthImageProcessorInvocation,
@ -309,16 +298,6 @@ export const isNormalbaeImageProcessorInvocation = (obj: unknown): obj is Normal
return false; return false;
}; };
/**
* Type guard for OpenposeImageProcessorInvocation
*/
export const isOpenposeImageProcessorInvocation = (obj: unknown): obj is OpenposeImageProcessorInvocation => {
if (isObject(obj) && 'type' in obj && obj.type === 'openpose_image_processor') {
return true;
}
return false;
};
/** /**
* Type guard for DWPoseImageProcessorInvocation * Type guard for DWPoseImageProcessorInvocation
*/ */

File diff suppressed because one or more lines are too long

View File

@ -156,7 +156,6 @@ export type MediapipeFaceProcessorInvocation = s['MediapipeFaceProcessorInvocati
export type MidasDepthImageProcessorInvocation = s['MidasDepthImageProcessorInvocation']; export type MidasDepthImageProcessorInvocation = s['MidasDepthImageProcessorInvocation'];
export type MlsdImageProcessorInvocation = s['MlsdImageProcessorInvocation']; export type MlsdImageProcessorInvocation = s['MlsdImageProcessorInvocation'];
export type NormalbaeImageProcessorInvocation = s['NormalbaeImageProcessorInvocation']; export type NormalbaeImageProcessorInvocation = s['NormalbaeImageProcessorInvocation'];
export type OpenposeImageProcessorInvocation = s['OpenposeImageProcessorInvocation'];
export type DWPoseImageProcessorInvocation = s['DWPoseImageProcessorInvocation']; export type DWPoseImageProcessorInvocation = s['DWPoseImageProcessorInvocation'];
export type PidiImageProcessorInvocation = s['PidiImageProcessorInvocation']; export type PidiImageProcessorInvocation = s['PidiImageProcessorInvocation'];
export type ZoeDepthImageProcessorInvocation = s['ZoeDepthImageProcessorInvocation']; export type ZoeDepthImageProcessorInvocation = s['ZoeDepthImageProcessorInvocation'];