diff --git a/invokeai/backend/install/model_install_backend.py b/invokeai/backend/install/model_install_backend.py
index 00c19712a0..c25d136855 100644
--- a/invokeai/backend/install/model_install_backend.py
+++ b/invokeai/backend/install/model_install_backend.py
@@ -284,16 +284,16 @@ class ModelInstall(object):
                     location = self._download_hf_model(repo_id, files, staging)
                     break
                 elif f'learned_embeds.{suffix}' in files:
-                    location = self._download_hf_model(repo_id, ['learned_embeds.suffix'], staging)
+                    location = self._download_hf_model(repo_id, [f'learned_embeds.{suffix}'], staging)
                     break
             if not location:
                 logger.warning(f'Could not determine type of repo {repo_id}. Skipping install.')
-                return
-
+                return {}
+
             info = ModelProbe().heuristic_probe(location, self.prediction_helper)
             if not info:
                 logger.warning(f'Could not probe {location}. Skipping install.')
-                return
+                return {}
             dest = self.config.models_path / info.base_type.value / info.model_type.value / self._get_model_name(repo_id,location)
             if dest.exists():
                 shutil.rmtree(dest)
diff --git a/invokeai/backend/model_management/model_probe.py b/invokeai/backend/model_management/model_probe.py
index 6555b7a1c4..0ef35625ae 100644
--- a/invokeai/backend/model_management/model_probe.py
+++ b/invokeai/backend/model_management/model_probe.py
@@ -64,8 +64,8 @@ class ModelProbe(object):
     @classmethod
     def probe(cls,
               model_path: Path,
-              model: Optional[Union[Dict, ModelMixin]],
-              prediction_type_helper: Callable[[Path],SchedulerPredictionType] = None)->ModelProbeInfo:
+              model: Optional[Union[Dict, ModelMixin]] = None,
+              prediction_type_helper: Optional[Callable[[Path],SchedulerPredictionType]] = None)->ModelProbeInfo:
         '''
         Probe the model at model_path and return sufficient information about it
         to place it somewhere in the models directory hierarchy. If the model is
diff --git a/invokeai/frontend/install/model_install.py b/invokeai/frontend/install/model_install.py
index 33ef114912..c367951f23 100644
--- a/invokeai/frontend/install/model_install.py
+++ b/invokeai/frontend/install/model_install.py
@@ -382,10 +382,21 @@ class addModelsForm(CyclingForm, npyscreen.FormMultiPage):
         )
         return min(cols, len(self.installed_models))
 
+    def confirm_deletions(self, selections: InstallSelections)->bool:
+        remove_models = selections.remove_models
+        if len(remove_models) > 0:
+            mods = "\n".join([ModelManager.parse_key(x)[0] for x in remove_models])
+            return npyscreen.notify_ok_cancel(f"These unchecked models will be deleted from disk. Continue?\n---------\n{mods}")
+        else:
+            return True
+
     def on_execute(self):
-        self.monitor.entry_widget.buffer(['Processing...'],scroll_end=True)
         self.marshall_arguments()
         app = self.parentApp
+        if not self.confirm_deletions(app.install_selections):
+            return
+
+        self.monitor.entry_widget.buffer(['Processing...'],scroll_end=True)
         self.ok_button.hidden = True
         self.display()
 
@@ -417,6 +428,8 @@ class addModelsForm(CyclingForm, npyscreen.FormMultiPage):
 
     def on_done(self):
         self.marshall_arguments()
+        if not self.confirm_deletions(self.parentApp.install_selections):
+            return
         self.parentApp.setNextForm(None)
         self.parentApp.user_cancelled = False
         self.editing = False