Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
folded in changes from 4099
This commit is contained in: parent 04229082d6, commit 6ad565d84c
@@ -108,15 +108,17 @@ class CompelInvocation(BaseInvocation):
         for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", self.prompt):
             name = trigger[1:-1]
             try:
-                ti_list.append((
-                    name,
-                    context.services.model_manager.get_model(
-                        model_name=name,
-                        base_model=self.clip.text_encoder.base_model,
-                        model_type=ModelType.TextualInversion,
-                        context=context,
-                    ).context.model
-                ))
+                ti_list.append(
+                    (
+                        name,
+                        context.services.model_manager.get_model(
+                            model_name=name,
+                            base_model=self.clip.text_encoder.base_model,
+                            model_type=ModelType.TextualInversion,
+                            context=context,
+                        ).context.model,
+                    )
+                )
             except ModelNotFoundException:
                 # print(e)
                 # import traceback
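This hunk, and the three matching hunks below, apply the same mechanical reformat: the `ti_list.append((...))` call is split so the tuple sits on its own lines with a trailing comma; behavior is unchanged. As a minimal sketch of the trigger-parsing step these blocks share (the regex and slicing are taken verbatim from the diff; the example prompt is made up):

```python
import re

# Textual-inversion embeddings are referenced in prompts as "<name>" tokens.
# Each match is stripped of its angle brackets; the resulting name is what
# gets paired with a loaded model in ti_list.
prompt = "a portrait in <easynegative> style, very detailed <my-embedding>"

ti_names = [trigger[1:-1] for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", prompt)]
print(ti_names)  # ['easynegative', 'my-embedding']
```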
@@ -197,15 +199,17 @@ class SDXLPromptInvocationBase:
         for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", prompt):
             name = trigger[1:-1]
             try:
-                ti_list.append((
-                    name,
-                    context.services.model_manager.get_model(
-                        model_name=name,
-                        base_model=clip_field.text_encoder.base_model,
-                        model_type=ModelType.TextualInversion,
-                        context=context,
-                    ).context.model
-                ))
+                ti_list.append(
+                    (
+                        name,
+                        context.services.model_manager.get_model(
+                            model_name=name,
+                            base_model=clip_field.text_encoder.base_model,
+                            model_type=ModelType.TextualInversion,
+                            context=context,
+                        ).context.model,
+                    )
+                )
             except ModelNotFoundException:
                 # print(e)
                 # import traceback
@@ -272,15 +276,17 @@ class SDXLPromptInvocationBase:
         for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", prompt):
             name = trigger[1:-1]
             try:
-                ti_list.append((
-                    name,
-                    context.services.model_manager.get_model(
-                        model_name=name,
-                        base_model=clip_field.text_encoder.base_model,
-                        model_type=ModelType.TextualInversion,
-                        context=context,
-                    ).context.model
-                ))
+                ti_list.append(
+                    (
+                        name,
+                        context.services.model_manager.get_model(
+                            model_name=name,
+                            base_model=clip_field.text_encoder.base_model,
+                            model_type=ModelType.TextualInversion,
+                            context=context,
+                        ).context.model,
+                    )
+                )
             except ModelNotFoundException:
                 # print(e)
                 # import traceback
@@ -74,14 +74,16 @@ class ONNXPromptInvocation(BaseInvocation):
         for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", self.prompt):
             name = trigger[1:-1]
             try:
-                ti_list.append((
-                    name,
-                    context.services.model_manager.get_model(
-                        model_name=name,
-                        base_model=self.clip.text_encoder.base_model,
-                        model_type=ModelType.TextualInversion,
-                    ).context.model
-                ))
+                ti_list.append(
+                    (
+                        name,
+                        context.services.model_manager.get_model(
+                            model_name=name,
+                            base_model=self.clip.text_encoder.base_model,
+                            model_type=ModelType.TextualInversion,
+                        ).context.model,
+                    )
+                )
             except Exception:
                 # print(e)
                 # import traceback
@@ -186,7 +186,7 @@ class ModelCache(object):
         cache_entry = self._cached_models.get(key, None)
         if cache_entry is None:
             self.logger.info(
-                f"Loading model {model_path}, type {base_model.value}:{model_type.value}:{submodel.value if submodel else ''}"
+                f"Loading model {model_path}, type {base_model.value}:{model_type.value}{':'+submodel.value if submodel else ''}"
             )

             # this will remove older cached models until
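The hunk above fixes a cosmetic logging bug: when no submodel is requested, the old f-string still emitted a trailing colon. A small demonstration, with plain strings standing in for the enum `.value` attributes:

```python
# base/mtype/submodel stand in for base_model.value, model_type.value and
# submodel.value; submodel is None when no submodel is requested.
base, mtype, submodel = "sd-1", "main", None

old = f"type {base}:{mtype}:{submodel if submodel else ''}"       # -> "type sd-1:main:"
new = f"type {base}:{mtype}{':' + submodel if submodel else ''}"  # -> "type sd-1:main"
print(old)
print(new)
```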
@@ -670,7 +670,7 @@ class ModelManager(object):
         # TODO: if path changed and old_model.path inside models folder should we delete this too?

         # remove conversion cache as config changed
-        old_model_path = self.app_config.root_path / old_model.path
+        old_model_path = self.resolve_model_path(old_model.path)
         old_model_cache = self._get_model_cache_path(old_model_path)
         if old_model_cache.exists():
             if old_model_cache.is_dir():
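This hunk and the two that follow replace ad-hoc `self.app_config.root_path / path` concatenation with the manager's `resolve_model_path()` helper, so path resolution happens in one place. The helper's body is not shown in this diff; a hedged sketch of its likely shape, assuming it resolves a stored (possibly relative) path against a configured base directory:

```python
from pathlib import Path

# Hypothetical standalone stand-in for ModelManager.resolve_model_path; the
# real helper's base directory and signature may differ. Note that pathlib
# already keeps absolute inputs intact: base / "/abs/x" is Path("/abs/x").
def resolve_model_path(base: Path, path: str | Path) -> Path:
    return base / Path(path)

print(resolve_model_path(Path("/invokeai"), "sd-1/main/model"))  # /invokeai/sd-1/main/model
print(resolve_model_path(Path("/invokeai"), "/abs/elsewhere"))   # /abs/elsewhere
```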
@@ -780,7 +780,7 @@ class ModelManager(object):
             model_type,
             **submodel,
         )
-        checkpoint_path = self.app_config.root_path / info["path"]
+        checkpoint_path = self.resolve_model_path(info["path"])
         old_diffusers_path = self.resolve_model_path(model.location)
         new_diffusers_path = (
             dest_directory or self.app_config.models_path / base_model.value / model_type.value
@@ -992,7 +992,7 @@ class ModelManager(object):
             model_manager=self,
             prediction_type_helper=ask_user_for_prediction_type,
         )
-        known_paths = {config.root_path / x["path"] for x in self.list_models()}
+        known_paths = {self.resolve_model_path(x["path"]) for x in self.list_models()}
         directories = {
             config.root_path / x
             for x in [