mirror of
https://github.com/invoke-ai/InvokeAI
synced 2024-08-30 20:32:17 +00:00
Merge branch 'main' into HideLegend2
This commit is contained in:
commit
dcd7e01908
from typing import Literal, Optional

from os.path import exists

import numpy as np
from pydantic import Field, validator

from .baseinvocation import BaseInvocation, BaseInvocationOutput, InvocationContext
from dynamicprompts.generators import RandomPromptGenerator, CombinatorialPromptGenerator
@ -55,3 +57,41 @@ class DynamicPromptInvocation(BaseInvocation):
|
|||||||
prompts = generator.generate(self.prompt, num_images=self.max_prompts)
|
prompts = generator.generate(self.prompt, num_images=self.max_prompts)
|
||||||
|
|
||||||
return PromptCollectionOutput(prompt_collection=prompts, count=len(prompts))
|
return PromptCollectionOutput(prompt_collection=prompts, count=len(prompts))
|
||||||
|
|
||||||
|
|
||||||
|
class PromptsFromFileInvocation(BaseInvocation):
    """Loads prompts from a text file"""

    # fmt: off
    type: Literal['prompt_from_file'] = 'prompt_from_file'

    # Inputs
    file_path: str = Field(description="Path to prompt text file")
    pre_prompt: Optional[str] = Field(description="String to prepend to each prompt")
    post_prompt: Optional[str] = Field(description="String to append to each prompt")
    start_line: int = Field(default=1, ge=1, description="Line in the file to start from")
    max_prompts: int = Field(default=1, ge=0, description="Max lines to read from file (0=all)")
    # fmt: on

    @validator("file_path")
    def file_path_exists(cls, v):
        # Fail fast at node-construction time rather than when the file is
        # first opened in invoke().
        if not exists(v):
            # BUG FIX: the original did `raise ValueError(FileNotFoundError)`,
            # which stringifies the exception *class* into the message
            # ("<class 'FileNotFoundError'>"). Pydantic validators must raise
            # ValueError, so keep the type but give it a useful message.
            raise ValueError(f"Prompt file not found at {v}")
        return v

    def promptsFromFile(self, file_path: str, pre_prompt: str, post_prompt: str, start_line: int, max_prompts: int):
        """Read up to max_prompts lines from file_path, starting at the
        1-based start_line, and wrap each stripped line with
        pre_prompt/post_prompt.

        max_prompts <= 0 means "read to the end of the file".
        Returns a list of prompt strings.
        """
        prompts = []
        first = start_line - 1  # convert 1-based line number to 0-based index
        # None sentinel replaces the original np.iinfo(np.int32).max hack for
        # the "no limit" case; output is identical.
        last = first + max_prompts if max_prompts > 0 else None
        with open(file_path) as f:
            for i, line in enumerate(f):
                if last is not None and i >= last:
                    # Stop as soon as the window is exhausted (the original
                    # checked after appending and read one extra line).
                    break
                if i >= first:
                    prompts.append((pre_prompt or '') + line.strip() + (post_prompt or ''))
        return prompts

    def invoke(self, context: InvocationContext) -> PromptCollectionOutput:
        """Entry point: read the configured window of prompt lines and wrap
        them in a PromptCollectionOutput with their count."""
        prompts = self.promptsFromFile(self.file_path, self.pre_prompt, self.post_prompt, self.start_line, self.max_prompts)
        return PromptCollectionOutput(prompt_collection=prompts, count=len(prompts))
|
@ -68,7 +68,7 @@ class TextualInversionModel(ModelBase):
|
|||||||
return None # diffusers-ti
|
return None # diffusers-ti
|
||||||
|
|
||||||
if os.path.isfile(path):
|
if os.path.isfile(path):
|
||||||
if any([path.endswith(f".{ext}") for ext in ["safetensors", "ckpt", "pt"]]):
|
if any([path.endswith(f".{ext}") for ext in ["safetensors", "ckpt", "pt", "bin"]]):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
raise InvalidModelException(f"Not a valid model: {path}")
|
raise InvalidModelException(f"Not a valid model: {path}")
|
||||||
|
45
pull_request_template.md
Normal file
45
pull_request_template.md
Normal file
@ -0,0 +1,45 @@
## What type of PR is this? (check all applicable)

- [ ] Refactor
- [ ] Feature
- [ ] Bug Fix
- [ ] Optimization
- [ ] Documentation Update

## Have you discussed this change with the InvokeAI team?

- [ ] Yes
- [ ] No, because:

## Description

## Related Tickets & Documents

<!--
For pull requests that relate or close an issue, please include them
below.

For example having the text: "closes #1234" would connect the current pull
request to issue 1234. And when we merge the pull request, Github will
automatically close the issue.
-->

- Related Issue #
- Closes #

## QA Instructions, Screenshots, Recordings

<!--
Please provide steps on how to test changes, any hardware or
software specifications as well as any other pertinent information.
-->

## Added/updated tests?

- [ ] Yes
- [ ] No : _please replace this line with details on why tests
  have not been included_

## [optional] Are there any post deployment tasks we need to perform?
Loading…
x
Reference in New Issue
Block a user