Fix LoRA import, fix SD2 config, fix list_models API

Sergey Borisov 2023-06-15 21:30:15 +03:00
parent 5c740452f6
commit 5f2d07917d
4 changed files with 4 additions and 2 deletions

@@ -80,7 +80,7 @@ async def list_models(
),
) -> ModelsList:
"""Gets a list of models"""
- models_raw = ApiDependencies.invoker.services.model_manager.list_models(model_type)
+ models_raw = ApiDependencies.invoker.services.model_manager.list_models(base_model, model_type)
models = parse_obj_as(ModelsList, { "models": models_raw })
return models
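
For orientation, a minimal usage sketch of the updated call from the caller's side, assuming the route's query parameters remain optional; the enum member names are illustrative assumptions rather than values taken from this diff:

# Hypothetical call sites after the signature change; enum member names are assumed.
mgr = ApiDependencies.invoker.services.model_manager
all_models = mgr.list_models(None, None)  # no filtering, assuming None is still accepted for both arguments
sd2_loras = mgr.list_models(BaseModelType.StableDiffusion2, ModelType.Lora)  # assumed enum members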

@@ -512,7 +512,6 @@ class ModelManager(object):
named 'model-name', and model_manager.config to get the full OmegaConf
object derived from models.yaml
"""
assert not(model_type is not None and base_model is None), "model_type must be provided with base_model"
models = dict()
for model_key in sorted(self.models, key=str.casefold):
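
This hunk drops one line, apparently the assertion that forbade passing model_type without base_model; with it gone the two filters can be applied independently. A rough sketch of that filtering, with attribute and storage names on the stored config objects assumed for illustration:

# Hypothetical sketch only; method shown outside its class for brevity,
# and the cfg attribute names are assumptions.
def list_models(self, base_model=None, model_type=None) -> dict:
    models = dict()
    for model_key in sorted(self.models, key=str.casefold):
        cfg = self.models[model_key]
        if base_model is not None and cfg.base_model != base_model:
            continue  # skip entries belonging to other base models
        if model_type is not None and cfg.model_type != model_type:
            continue  # skip entries of other model types
        models[model_key] = cfg
    return models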

@@ -1,3 +1,4 @@
import os
import torch
from typing import Optional, Union, Literal
from .base import (

@@ -123,6 +123,7 @@ class StableDiffusion2Model(DiffusersModel):
class DiffusersConfig(ModelConfigBase):
format: Literal["diffusers"]
vae: Optional[str] = Field(None)
variant: ModelVariantType
prediction_type: SchedulerPredictionType
upcast_attention: bool
@@ -130,6 +131,7 @@ class StableDiffusion2Model(DiffusersModel):
format: Literal["checkpoint"]
vae: Optional[str] = Field(None)
config: Optional[str] = Field(None)
variant: ModelVariantType
prediction_type: SchedulerPredictionType
upcast_attention: bool
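
Putting the two hunks above together, the SD2 config classes plausibly end up shaped as sketched below; this is a reconstruction for orientation only, and the inner class name CheckpointConfig, the imports, and anything outside the diffed lines are assumptions:

# Hypothetical reconstruction; only the listed fields come from the diff above.
# DiffusersModel, ModelConfigBase, ModelVariantType and SchedulerPredictionType
# are assumed to come from the package's .base module, as in the import hunk.
from typing import Literal, Optional
from pydantic import Field

class StableDiffusion2Model(DiffusersModel):
    class DiffusersConfig(ModelConfigBase):
        format: Literal["diffusers"]
        vae: Optional[str] = Field(None)
        variant: ModelVariantType
        prediction_type: SchedulerPredictionType
        upcast_attention: bool

    class CheckpointConfig(ModelConfigBase):  # inner class name assumed
        format: Literal["checkpoint"]
        vae: Optional[str] = Field(None)
        config: Optional[str] = Field(None)
        variant: ModelVariantType
        prediction_type: SchedulerPredictionType
        upcast_attention: bool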