Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
cleanup: Remove Openpose Image Processor
This commit is contained in:
parent f8e566d62a
commit 50b93992cf
@@ -81,7 +81,7 @@ their descriptions.
| ONNX Text to Latents | Generates latents from conditionings. |
| ONNX Model Loader | Loads a main model, outputting its submodels. |
| OpenCV Inpaint | Simple inpaint using opencv. |
| Openpose Processor | Applies Openpose processing to image |
| DWPose Processor | Applies Openpose processing to image |
| PIDI Processor | Applies PIDI processing to image |
| Prompts from File | Loads prompts from a text file |
| Random Integer | Outputs a single random integer. |
@@ -17,7 +17,6 @@ from controlnet_aux import (
    MidasDetector,
    MLSDdetector,
    NormalBaeDetector,
    OpenposeDetector,
    PidiNetDetector,
    SamDetector,
    ZoeDetector,
@@ -277,31 +276,6 @@ class LineartAnimeImageProcessorInvocation(ImageProcessorInvocation):
        return processed_image


@invocation(
    "openpose_image_processor",
    title="Openpose Processor",
    tags=["controlnet", "openpose", "pose"],
    category="controlnet",
    version="1.2.0",
)
class OpenposeImageProcessorInvocation(ImageProcessorInvocation):
    """Applies Openpose processing to image"""

    hand_and_face: bool = InputField(default=False, description="Whether to use hands and face mode")
    detect_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.detect_res)
    image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)

    def run_processor(self, image):
        openpose_processor = OpenposeDetector.from_pretrained(pretrained_model_or_path="lllyasviel/Annotators")
        processed_image = openpose_processor(
            image,
            detect_resolution=self.detect_resolution,
            image_resolution=self.image_resolution,
            hand_and_face=self.hand_and_face,
        )
        return processed_image


@invocation(
    "midas_depth_image_processor",
    title="Midas Depth Processor",
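For context, the removed OpenposeImageProcessorInvocation above was a thin wrapper around the OpenposeDetector shipped with controlnet_aux. If a pose map is still needed outside InvokeAI's node graph, the same preprocessing can be run directly; the sketch below assumes controlnet_aux and Pillow are installed, and the file names pose_input.png / pose_map.png are hypothetical examples, not paths from this repository.

```python
# Minimal standalone sketch of the preprocessing the removed node performed.
# Assumes the controlnet_aux and Pillow packages are installed; the file
# names below are hypothetical examples.
from controlnet_aux import OpenposeDetector
from PIL import Image

openpose = OpenposeDetector.from_pretrained(pretrained_model_or_path="lllyasviel/Annotators")
image = Image.open("pose_input.png")

# Same parameters the removed invocation exposed as InputFields.
pose_map = openpose(
    image,
    detect_resolution=512,
    image_resolution=512,
    hand_and_face=False,
)
pose_map.save("pose_map.png")
```

Within InvokeAI itself, the DWPose Processor node listed in the table above remains available for pose estimation.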
@@ -264,8 +264,6 @@
    "noneDescription": "No processing applied",
    "normalBae": "Normal BAE",
    "normalBaeDescription": "Normal BAE processing",
    "openPose": "Openpose",
    "openPoseDescription": "Human pose estimation using Openpose",
    "dwPose": "DWPose",
    "dwPoseDescription": "Human pose estimation using DWPose",
    "pidi": "PIDI",
@@ -14,7 +14,6 @@ import MediapipeFaceProcessor from './processors/MediapipeFaceProcessor';
import MidasDepthProcessor from './processors/MidasDepthProcessor';
import MlsdImageProcessor from './processors/MlsdImageProcessor';
import NormalBaeProcessor from './processors/NormalBaeProcessor';
import OpenposeProcessor from './processors/OpenposeProcessor';
import PidiProcessor from './processors/PidiProcessor';
import ZoeDepthProcessor from './processors/ZoeDepthProcessor';
@@ -74,10 +73,6 @@ const ControlAdapterProcessorComponent = ({ id }: Props) => {
    return <NormalBaeProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
  }

  if (processorNode.type === 'openpose_image_processor') {
    return <OpenposeProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
  }

  if (processorNode.type === 'dwpose_image_processor') {
    return <DWPoseProcessor controlNetId={id} processorNode={processorNode} isEnabled={isEnabled} />;
  }
@ -1,92 +0,0 @@
|
||||
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
|
||||
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
|
||||
import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
|
||||
import type { RequiredOpenposeImageProcessorInvocation } from 'features/controlAdapters/store/types';
|
||||
import type { ChangeEvent } from 'react';
|
||||
import { memo, useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import ProcessorWrapper from './common/ProcessorWrapper';
|
||||
|
||||
const DEFAULTS = CONTROLNET_PROCESSORS.openpose_image_processor.default as RequiredOpenposeImageProcessorInvocation;
|
||||
|
||||
type Props = {
|
||||
controlNetId: string;
|
||||
processorNode: RequiredOpenposeImageProcessorInvocation;
|
||||
isEnabled: boolean;
|
||||
};
|
||||
|
||||
const OpenposeProcessor = (props: Props) => {
|
||||
const { controlNetId, processorNode, isEnabled } = props;
|
||||
const { image_resolution, detect_resolution, hand_and_face } = processorNode;
|
||||
const processorChanged = useProcessorNodeChanged();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const handleDetectResolutionChanged = useCallback(
|
||||
(v: number) => {
|
||||
processorChanged(controlNetId, { detect_resolution: v });
|
||||
},
|
||||
[controlNetId, processorChanged]
|
||||
);
|
||||
|
||||
const handleImageResolutionChanged = useCallback(
|
||||
(v: number) => {
|
||||
processorChanged(controlNetId, { image_resolution: v });
|
||||
},
|
||||
[controlNetId, processorChanged]
|
||||
);
|
||||
|
||||
const handleHandAndFaceChanged = useCallback(
|
||||
(e: ChangeEvent<HTMLInputElement>) => {
|
||||
processorChanged(controlNetId, { hand_and_face: e.target.checked });
|
||||
},
|
||||
[controlNetId, processorChanged]
|
||||
);
|
||||
|
||||
return (
|
||||
<ProcessorWrapper>
|
||||
<FormControl isDisabled={!isEnabled}>
|
||||
<FormLabel>{t('controlnet.detectResolution')}</FormLabel>
|
||||
<CompositeSlider
|
||||
value={detect_resolution}
|
||||
onChange={handleDetectResolutionChanged}
|
||||
defaultValue={DEFAULTS.detect_resolution}
|
||||
min={0}
|
||||
max={4096}
|
||||
marks
|
||||
/>
|
||||
<CompositeNumberInput
|
||||
value={detect_resolution}
|
||||
onChange={handleDetectResolutionChanged}
|
||||
defaultValue={DEFAULTS.detect_resolution}
|
||||
min={0}
|
||||
max={4096}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormControl isDisabled={!isEnabled}>
|
||||
<FormLabel>{t('controlnet.imageResolution')}</FormLabel>
|
||||
<CompositeSlider
|
||||
value={image_resolution}
|
||||
onChange={handleImageResolutionChanged}
|
||||
defaultValue={DEFAULTS.image_resolution}
|
||||
min={0}
|
||||
max={4096}
|
||||
marks
|
||||
/>
|
||||
<CompositeNumberInput
|
||||
value={image_resolution}
|
||||
onChange={handleImageResolutionChanged}
|
||||
defaultValue={DEFAULTS.image_resolution}
|
||||
min={0}
|
||||
max={4096}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormControl isDisabled={!isEnabled}>
|
||||
<FormLabel>{t('controlnet.handAndFace')}</FormLabel>
|
||||
<Switch isChecked={hand_and_face} onChange={handleHandAndFaceChanged} />
|
||||
</FormControl>
|
||||
</ProcessorWrapper>
|
||||
);
|
||||
};
|
||||
|
||||
export default memo(OpenposeProcessor);
|
@@ -205,22 +205,6 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
      image_resolution: 512,
    },
  },
  openpose_image_processor: {
    type: 'openpose_image_processor',
    get label() {
      return i18n.t('controlnet.openPose');
    },
    get description() {
      return i18n.t('controlnet.openPoseDescription');
    },
    default: {
      id: 'openpose_image_processor',
      type: 'openpose_image_processor',
      detect_resolution: 512,
      image_resolution: 512,
      hand_and_face: false,
    },
  },
  dwpose_image_processor: {
    type: 'dwpose_image_processor',
    get label() {
@@ -19,7 +19,6 @@ import type {
  MidasDepthImageProcessorInvocation,
  MlsdImageProcessorInvocation,
  NormalbaeImageProcessorInvocation,
  OpenposeImageProcessorInvocation,
  PidiImageProcessorInvocation,
  ZoeDepthImageProcessorInvocation,
} from 'services/api/types';
@@ -41,7 +40,6 @@ export type ControlAdapterProcessorNode =
  | MidasDepthImageProcessorInvocation
  | MlsdImageProcessorInvocation
  | NormalbaeImageProcessorInvocation
  | OpenposeImageProcessorInvocation
  | DWPoseImageProcessorInvocation
  | PidiImageProcessorInvocation
  | ZoeDepthImageProcessorInvocation;
@@ -144,14 +142,6 @@ export type RequiredNormalbaeImageProcessorInvocation = O.Required<
  'type' | 'detect_resolution' | 'image_resolution'
>;

/**
 * The Openpose processor node, with parameters flagged as required
 */
export type RequiredOpenposeImageProcessorInvocation = O.Required<
  OpenposeImageProcessorInvocation,
  'type' | 'detect_resolution' | 'image_resolution' | 'hand_and_face'
>;

/**
 * The DWPose processor node, with parameters flagged as required
 */
@@ -189,7 +179,6 @@ export type RequiredControlAdapterProcessorNode =
  | RequiredMidasDepthImageProcessorInvocation
  | RequiredMlsdImageProcessorInvocation
  | RequiredNormalbaeImageProcessorInvocation
  | RequiredOpenposeImageProcessorInvocation
  | RequiredDWPoseImageProcessorInvocation
  | RequiredPidiImageProcessorInvocation
  | RequiredZoeDepthImageProcessorInvocation,
@@ -309,16 +298,6 @@ export const isNormalbaeImageProcessorInvocation = (obj: unknown): obj is Normal
  return false;
};

/**
 * Type guard for OpenposeImageProcessorInvocation
 */
export const isOpenposeImageProcessorInvocation = (obj: unknown): obj is OpenposeImageProcessorInvocation => {
  if (isObject(obj) && 'type' in obj && obj.type === 'openpose_image_processor') {
    return true;
  }
  return false;
};

/**
 * Type guard for DWPoseImageProcessorInvocation
 */
File diff suppressed because one or more lines are too long
@@ -156,7 +156,6 @@ export type MediapipeFaceProcessorInvocation = s['MediapipeFaceProcessorInvocati
export type MidasDepthImageProcessorInvocation = s['MidasDepthImageProcessorInvocation'];
export type MlsdImageProcessorInvocation = s['MlsdImageProcessorInvocation'];
export type NormalbaeImageProcessorInvocation = s['NormalbaeImageProcessorInvocation'];
export type OpenposeImageProcessorInvocation = s['OpenposeImageProcessorInvocation'];
export type DWPoseImageProcessorInvocation = s['DWPoseImageProcessorInvocation'];
export type PidiImageProcessorInvocation = s['PidiImageProcessorInvocation'];
export type ZoeDepthImageProcessorInvocation = s['ZoeDepthImageProcessorInvocation'];