Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00

Merge branch 'main' into feat/db/graceful-migrate-workflows

Commit 60629cba3c
```diff
@@ -270,7 +270,7 @@ upgrade script.** See the next section for a Windows recipe.
 3. Select option [1] to upgrade to the latest release.

 4. Once the upgrade is finished you will be returned to the launcher
-   menu. Select option [7] "Re-run the configure script to fix a broken
+   menu. Select option [6] "Re-run the configure script to fix a broken
    install or to complete a major upgrade".

    This will run the configure script against the v2.3 directory and
```
```diff
@@ -11,5 +11,5 @@ INVOKEAI_ROOT=
 # HUGGING_FACE_HUB_TOKEN=

 ## optional variables specific to the docker setup.
-# GPU_DRIVER=cuda # or rocm
+# GPU_DRIVER=nvidia #| rocm
 # CONTAINER_UID=1000
```
```diff
@@ -1,6 +1,14 @@
 # InvokeAI Containerized

-All commands are to be run from the `docker` directory: `cd docker`
+All commands should be run within the `docker` directory: `cd docker`

+## Quickstart :rocket:
+
+On a known working Linux+Docker+CUDA (Nvidia) system, execute `./run.sh` in this directory. It will take a few minutes - depending on your internet speed - to install the core models. Once the application starts up, open `http://localhost:9090` in your browser to Invoke!
+
+For more configuration options (using an AMD GPU, custom root directory location, etc): read on.
+
+## Detailed setup
+
 #### Linux
```

```diff
@@ -18,7 +26,7 @@ All commands are to be run from the `docker` directory: `cd docker`
 This is done via Docker Desktop preferences

-## Quickstart
+### Configure Invoke environment

 1. Make a copy of `env.sample` and name it `.env` (`cp env.sample .env` (Mac/Linux) or `copy example.env .env` (Windows)). Make changes as necessary. Set `INVOKEAI_ROOT` to an absolute path to:
     a. the desired location of the InvokeAI runtime directory, or
```

````diff
@@ -37,19 +45,21 @@ The runtime directory (holding models and outputs) will be created in the locati
 The Docker daemon on the system must be already set up to use the GPU. In case of Linux, this involves installing `nvidia-docker-runtime` and configuring the `nvidia` runtime as default. Steps will be different for AMD. Please see Docker documentation for the most up-to-date instructions for using your GPU with Docker.

+To use an AMD GPU, set `GPU_DRIVER=rocm` in your `.env` file.
+
 ## Customize

 Check the `.env.sample` file. It contains some environment variables for running in Docker. Copy it, name it `.env`, and fill it in with your own values. Next time you run `run.sh`, your custom values will be used.

 You can also set these values in `docker-compose.yml` directly, but `.env` will help avoid conflicts when code is updated.

-Example (values are optional, but setting `INVOKEAI_ROOT` is highly recommended):
+Values are optional, but setting `INVOKEAI_ROOT` is highly recommended. The default is `~/invokeai`. Example:

 ```bash
 INVOKEAI_ROOT=/Volumes/WorkDrive/invokeai
 HUGGINGFACE_TOKEN=the_actual_token
 CONTAINER_UID=1000
-GPU_DRIVER=cuda
+GPU_DRIVER=nvidia
 ```

 Any environment variables supported by InvokeAI can be set here - please see the [Configuration docs](https://invoke-ai.github.io/InvokeAI/features/CONFIGURATION/) for further detail.
````
```diff
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-set -e
+set -e -o pipefail

 run() {
   local scriptdir=$(dirname "${BASH_SOURCE[0]}")
```

```diff
@@ -8,14 +8,18 @@ run() {
   local build_args=""
   local profile=""

-  [[ -f ".env" ]] &&
-    build_args=$(awk '$1 ~ /=[^$]/ && $0 !~ /^#/ {print "--build-arg " $0 " "}' .env) &&
-    profile="$(awk -F '=' '/GPU_DRIVER/ {print $2}' .env)"
+  touch .env
+  build_args=$(awk '$1 ~ /=[^$]/ && $0 !~ /^#/ {print "--build-arg " $0 " "}' .env) &&
+    profile="$(awk -F '=' '/GPU_DRIVER/ {print $2}' .env)"
+
+  [[ -z "$profile" ]] && profile="nvidia"

   local service_name="invokeai-$profile"

-  printf "%s\n" "docker compose build args:"
-  printf "%s\n" "$build_args"
+  if [[ ! -z "$build_args" ]]; then
+    printf "%s\n" "docker compose build args:"
+    printf "%s\n" "$build_args"
+  fi

   docker compose build $build_args
   unset build_args
```
docs/contributing/DOWNLOAD_QUEUE.md (new file, 277 lines)
@@ -0,0 +1,277 @@
# The InvokeAI Download Queue

The DownloadQueueService provides a multithreaded parallel download
queue for arbitrary URLs, with queue prioritization, event handling,
and restart capabilities.

## Simple Example

```python
from invokeai.app.services.download import DownloadQueueService, TqdmProgress

download_queue = DownloadQueueService()
for url in ['https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/a-painting-of-a-fire.png?raw=true',
            'https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/birdhouse.png?raw=true',
            'https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/missing.png',
            'https://civitai.com/api/download/models/152309?type=Model&format=SafeTensor',
            ]:

    # urls start downloading as soon as download() is called
    download_queue.download(source=url,
                            dest='/tmp/downloads',
                            on_progress=TqdmProgress().update
                            )

download_queue.join()  # wait for all downloads to finish
for job in download_queue.list_jobs():
    print(job.model_dump_json(exclude_none=True, indent=4), "\n")
```

Output:

```
{
    "source": "https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/a-painting-of-a-fire.png?raw=true",
    "dest": "/tmp/downloads",
    "id": 0,
    "priority": 10,
    "status": "completed",
    "download_path": "/tmp/downloads/a-painting-of-a-fire.png",
    "job_started": "2023-12-04T05:34:41.742174",
    "job_ended": "2023-12-04T05:34:42.592035",
    "bytes": 666734,
    "total_bytes": 666734
}

{
    "source": "https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/birdhouse.png?raw=true",
    "dest": "/tmp/downloads",
    "id": 1,
    "priority": 10,
    "status": "completed",
    "download_path": "/tmp/downloads/birdhouse.png",
    "job_started": "2023-12-04T05:34:41.741975",
    "job_ended": "2023-12-04T05:34:42.652841",
    "bytes": 774949,
    "total_bytes": 774949
}

{
    "source": "https://github.com/invoke-ai/InvokeAI/blob/main/invokeai/assets/missing.png",
    "dest": "/tmp/downloads",
    "id": 2,
    "priority": 10,
    "status": "error",
    "job_started": "2023-12-04T05:34:41.742079",
    "job_ended": "2023-12-04T05:34:42.147625",
    "bytes": 0,
    "total_bytes": 0,
    "error_type": "HTTPError(Not Found)",
    "error": "Traceback (most recent call last):\n File \"/home/lstein/Projects/InvokeAI/invokeai/app/services/download/download_default.py\", line 182, in _download_next_item\n self._do_download(job)\n File \"/home/lstein/Projects/InvokeAI/invokeai/app/services/download/download_default.py\", line 206, in _do_download\n raise HTTPError(resp.reason)\nrequests.exceptions.HTTPError: Not Found\n"
}

{
    "source": "https://civitai.com/api/download/models/152309?type=Model&format=SafeTensor",
    "dest": "/tmp/downloads",
    "id": 3,
    "priority": 10,
    "status": "completed",
    "download_path": "/tmp/downloads/xl_more_art-full_v1.safetensors",
    "job_started": "2023-12-04T05:34:42.147645",
    "job_ended": "2023-12-04T05:34:43.735990",
    "bytes": 719020768,
    "total_bytes": 719020768
}
```

## The API

The default download queue is `DownloadQueueService`, an
implementation of the ABC `DownloadQueueServiceBase`. It juggles multiple
background download requests and provides facilities for interrogating
and cancelling the requests. Access to a current or past download task
is mediated via `DownloadJob` objects, which report the current status
of a job request.

### The Queue Object

A default download queue is located in
`ApiDependencies.invoker.services.download_queue`. However, you can
create additional instances if you need to isolate your queue from the
main one.

```python
queue = DownloadQueueService(event_bus=events)
```

`DownloadQueueService()` takes three optional arguments:

| **Argument** | **Type** | **Default** | **Description** |
|----------------|-----------------|---------------|-----------------|
| `max_parallel_dl` | int | 5 | Maximum number of simultaneous downloads allowed |
| `event_bus` | EventServiceBase | None | System-wide FastAPI event bus for reporting download events |
| `requests_session` | requests.sessions.Session | None | An alternative requests Session object to use for the download |

`max_parallel_dl` specifies how many download jobs are allowed to run
simultaneously. Each will run in a different thread of execution.

`event_bus` is an EventServiceBase, typically the one created at
InvokeAI startup. If present, download events are periodically emitted
on this bus to allow clients to follow download progress.

`requests_session` is a `requests` library Session object. It is used
mainly for unit testing.
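If you need an isolated queue, for example in a unit test that injects its own
`requests` Session, a minimal sketch might look like the following (the Session
configuration and URL are illustrative, not part of the service API):

```python
import requests

from invokeai.app.services.download import DownloadQueueService

# Illustrative: a Session pre-configured for the test environment
session = requests.Session()
session.headers.update({"User-Agent": "invokeai-download-test"})

queue = DownloadQueueService(max_parallel_dl=2, requests_session=session)
queue.start()  # start the worker threads; see DownloadQueueServiceBase.start()
try:
    queue.download(source="https://example.com/file.bin", dest="/tmp/downloads")
    queue.join()
finally:
    queue.stop()
```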
### The Job object

The queue operates on a series of download job objects. These objects
specify the source and destination of the download, and keep track of
the progress of the download.

The only job type currently implemented is `DownloadJob`, a pydantic object with the
following fields:

| **Field** | **Type** | **Default** | **Description** |
|----------------|-----------------|---------------|-----------------|
| _Fields passed in at job creation time_ |
| `source` | AnyHttpUrl | | Where to download from |
| `dest` | Path | | Where to download to |
| `access_token` | str | | [optional] string containing authentication token for access |
| `on_start` | Callable | | [optional] callback when the download starts |
| `on_progress` | Callable | | [optional] callback called at intervals during download progress |
| `on_complete` | Callable | | [optional] callback called after successful download completion |
| `on_error` | Callable | | [optional] callback called after an error occurs |
| `id` | int | auto assigned | Job ID, an integer >= 0 |
| `priority` | int | 10 | Job priority. Lower priorities run before higher priorities |
| _Fields updated over the course of the download task_ |
| `status` | DownloadJobStatus | | Status code |
| `download_path` | Path | | Path to the location of the downloaded file |
| `job_started` | str | | Timestamp for when the job started running |
| `job_ended` | str | | Timestamp for when the job completed or errored out |
| `job_sequence` | int | | A counter that is incremented each time a model is dequeued |
| `bytes` | int | 0 | Bytes downloaded so far |
| `total_bytes` | int | 0 | Total size of the file at the remote site |
| `error_type` | str | | String version of the exception that caused an error during download |
| `error` | str | | String version of the traceback associated with an error |
| `cancelled` | bool | False | Set to true if the job was cancelled by the caller |

When you create a job, you can assign it a `priority`. If multiple
jobs are queued, the job with the lowest priority value runs first.
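For instance, here is a sketch of two queued downloads where the second is given a
more urgent priority (the URLs and destination are placeholders):

```python
# Lower priority values are dequeued first when all worker threads are busy.
routine = download_queue.download(source="https://example.com/model-a.safetensors",
                                  dest="/tmp/downloads")             # default priority 10
urgent = download_queue.download(source="https://example.com/model-b.safetensors",
                                 dest="/tmp/downloads", priority=1)  # runs ahead of waiting jobs
download_queue.join()
```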
Every job has a `source` and a `dest`. `source` is a pydantic.networks AnyHttpUrl object.
`dest` is a path on the local filesystem that specifies the
destination for the downloaded object. Its semantics are
described below.

When the job is submitted, it is assigned a numeric `id`. The id can
then be used to fetch the job object from the queue.

The `status` field is updated by the queue to indicate where the job
is in its lifecycle. Values are defined in the string enum
`DownloadJobStatus`, a symbol available from
`invokeai.app.services.download`. Possible values are:

| **Value** | **String Value** | **Description** |
|--------------|---------------------|-------------------|
| `WAITING` | waiting | Job is on the queue but not yet running |
| `RUNNING` | running | The download has started |
| `COMPLETED` | completed | Job has finished its work without an error |
| `CANCELLED` | cancelled | Job was cancelled by the caller |
| `ERROR` | error | Job encountered an error and will not run again |

`job_started` and `job_ended` indicate when the job
was started (as an ISO-8601 timestamp string) and when it completed.

In case of an error, the job's status will be set to `DownloadJobStatus.ERROR`, the text of the
Exception that caused the error will be placed in the `error_type`
field, and the traceback that led to the error will be in `error`.

A cancelled job will have status `DownloadJobStatus.CANCELLED`, and
the job's `cancelled` property will be set to True.
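Putting these fields together, a small sketch that waits for a single job and then
inspects its terminal state (polling is shown for illustration only; `join()` or an
`on_complete` callback is usually more convenient, and the URL is a placeholder):

```python
import time

from invokeai.app.services.download import DownloadJobStatus

job = download_queue.download(source="https://example.com/model.safetensors",
                              dest="/tmp/downloads")

# Poll until the job reaches a terminal state
while job.status not in (DownloadJobStatus.COMPLETED,
                         DownloadJobStatus.CANCELLED,
                         DownloadJobStatus.ERROR):
    time.sleep(0.5)

if job.status == DownloadJobStatus.COMPLETED:
    print(f"{job.download_path}: {job.bytes}/{job.total_bytes} bytes "
          f"({job.job_started} -> {job.job_ended})")
else:
    print(f"{job.source} failed or was cancelled: {job.error_type}")
```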
### Callbacks

Download jobs can be associated with a series of callbacks, each with
the signature `Callable[["DownloadJob"], None]`. The callbacks are assigned
using the optional arguments `on_start`, `on_progress`, `on_complete` and
`on_error`. When the corresponding event occurs, the callback will be
invoked and passed the job. The callback will be run in a `try:`
context in the same thread as the download job. Any exceptions that
occur during execution of the callback will be caught and converted
into a log error message, thereby allowing the download to continue.
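For example, a sketch that logs success and failure from the calling code (the URL
and destination are placeholders):

```python
from invokeai.app.services.download import DownloadJob

def log_complete(job: DownloadJob) -> None:
    print(f"{job.source} -> {job.download_path} ({job.total_bytes} bytes)")

def log_error(job: DownloadJob) -> None:
    print(f"{job.source} failed: {job.error_type}")

download_queue.download(source="https://example.com/model.safetensors",
                        dest="/tmp/downloads",
                        on_complete=log_complete,
                        on_error=log_error)
```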
#### `TqdmProgress`

The `invokeai.app.services.download.download_default` module defines a
class named `TqdmProgress` which can be used as an `on_progress`
handler to display a completion bar in the console. Use as follows:

```python
from invokeai.app.services.download import TqdmProgress

download_queue.download(source='http://some.server.somewhere/some_file',
                        dest='/tmp/downloads',
                        on_progress=TqdmProgress().update
                        )
```
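Writing your own handler is equally simple. The sketch below prints the percent
complete instead of drawing a bar (the URL is a placeholder):

```python
from invokeai.app.services.download import DownloadJob

def print_percent(job: DownloadJob) -> None:
    # total_bytes is 0 when the server does not report a content-length
    if job.total_bytes > 0:
        print(f"{job.download_path}: {job.bytes / job.total_bytes:.0%}")

download_queue.download(source='http://some.server.somewhere/some_file',
                        dest='/tmp/downloads',
                        on_progress=print_percent)
```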
### Events

If the queue was initialized with the InvokeAI event bus (the case
when using `ApiDependencies.invoker.services.download_queue`), then
download events will also be issued on the bus. The events are:

* `download_started` -- This is issued when a job is taken off the
queue and a request is made to the remote server for the URL headers, but before any data
has been downloaded. The event payload will contain the keys `source`
and `download_path`. The latter contains the path that the URL will be
downloaded to.

* `download_progress` -- This is issued periodically as the download
runs. The payload contains the keys `source`, `download_path`,
`current_bytes` and `total_bytes`. The latter two fields can be
used to display the percent complete.

* `download_complete` -- This is issued when the download completes
successfully. The payload contains the keys `source`, `download_path`
and `total_bytes`.

* `download_error` -- This is issued when the download stops because
of an error condition. The payload contains the fields `error_type`
and `error`. The former is the text representation of the exception,
and the latter is a traceback showing where the error occurred.
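For reference, a `download_progress` event dispatched on the bus looks roughly like
the dictionary below. The `event`/`data` envelope and the `timestamp` key are added
by `EventServiceBase`; the concrete values are illustrative only:

```python
event = {
    "event": "download_progress",
    "data": {
        "source": "https://example.com/model.safetensors",
        "download_path": "/tmp/downloads/model.safetensors",
        "current_bytes": 359_510_384,
        "total_bytes": 719_020_768,
        "timestamp": 1701668082,  # added automatically when the event is emitted
    },
}

# The two byte counters are enough to compute percent complete on the client side.
percent_complete = 100 * event["data"]["current_bytes"] / event["data"]["total_bytes"]
```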
### Job control

To create a job, call the queue's `download()` method. You can list all
jobs using `list_jobs()`, fetch a single job by its ID with
`id_to_job()`, cancel a running job with `cancel_job()`, cancel all
running jobs with `cancel_all_jobs()`, and wait for all jobs to finish
with `join()`.
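A short sketch tying these together (URL and destination are placeholders):

```python
job = download_queue.download(source="https://example.com/model.safetensors",
                              dest="/tmp/downloads")

found = download_queue.id_to_job(job.id)  # look the job up again by its ID
download_queue.cancel_job(found)          # ask for it to be cancelled

download_queue.join()         # block until the queue has drained
download_queue.prune_jobs()   # drop completed/errored jobs from list_jobs()
print(download_queue.list_jobs())
```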
#### job = queue.download(source, dest, priority, access_token)

Create a new download job and put it on the queue, returning the
DownloadJob object.

#### jobs = queue.list_jobs()

Return a list of all active and inactive `DownloadJob`s.

#### job = queue.id_to_job(id)

Return the job corresponding to the given ID. Raises
`UnknownJobIDException` if no job with that ID is known to the queue.

#### queue.prune_jobs()

Remove inactive (complete or errored) jobs from the listing returned
by `list_jobs()`.

#### queue.join()

Block until all pending jobs have run to completion or errored out.
@ -11,6 +11,7 @@ from ..services.board_images.board_images_default import BoardImagesService
|
||||
from ..services.board_records.board_records_sqlite import SqliteBoardRecordStorage
|
||||
from ..services.boards.boards_default import BoardService
|
||||
from ..services.config import InvokeAIAppConfig
|
||||
from ..services.download import DownloadQueueService
|
||||
from ..services.image_files.image_files_disk import DiskImageFileStorage
|
||||
from ..services.image_records.image_records_sqlite import SqliteImageRecordStorage
|
||||
from ..services.images.images_default import ImageService
|
||||
@ -29,8 +30,7 @@ from ..services.model_records import ModelRecordServiceSQL
|
||||
from ..services.names.names_default import SimpleNameService
|
||||
from ..services.session_processor.session_processor_default import DefaultSessionProcessor
|
||||
from ..services.session_queue.session_queue_sqlite import SqliteSessionQueue
|
||||
from ..services.shared.default_graphs import create_system_graphs
|
||||
from ..services.shared.graph import GraphExecutionState, LibraryGraph
|
||||
from ..services.shared.graph import GraphExecutionState
|
||||
from ..services.urls.urls_default import LocalUrlService
|
||||
from ..services.workflow_records.workflow_records_sqlite import SqliteWorkflowRecordsStorage
|
||||
from .events import FastAPIEventService
|
||||
@ -80,13 +80,13 @@ class ApiDependencies:
|
||||
boards = BoardService()
|
||||
events = FastAPIEventService(event_handler_id)
|
||||
graph_execution_manager = SqliteItemStorage[GraphExecutionState](db=db, table_name="graph_executions")
|
||||
graph_library = SqliteItemStorage[LibraryGraph](db=db, table_name="graphs")
|
||||
image_records = SqliteImageRecordStorage(db=db)
|
||||
images = ImageService()
|
||||
invocation_cache = MemoryInvocationCache(max_cache_size=config.node_cache_size)
|
||||
latents = ForwardCacheLatentsStorage(DiskLatentsStorage(f"{output_folder}/latents"))
|
||||
model_manager = ModelManagerService(config, logger)
|
||||
model_record_service = ModelRecordServiceSQL(db=db)
|
||||
download_queue_service = DownloadQueueService(event_bus=events)
|
||||
model_install_service = ModelInstallService(
|
||||
app_config=config, record_store=model_record_service, event_bus=events
|
||||
)
|
||||
@ -107,7 +107,6 @@ class ApiDependencies:
|
||||
configuration=configuration,
|
||||
events=events,
|
||||
graph_execution_manager=graph_execution_manager,
|
||||
graph_library=graph_library,
|
||||
image_files=image_files,
|
||||
image_records=image_records,
|
||||
images=images,
|
||||
@ -116,6 +115,7 @@ class ApiDependencies:
|
||||
logger=logger,
|
||||
model_manager=model_manager,
|
||||
model_records=model_record_service,
|
||||
download_queue=download_queue_service,
|
||||
model_install=model_install_service,
|
||||
names=names,
|
||||
performance_statistics=performance_statistics,
|
||||
@ -127,8 +127,6 @@ class ApiDependencies:
|
||||
workflow_records=workflow_records,
|
||||
)
|
||||
|
||||
create_system_graphs(services.graph_library)
|
||||
|
||||
ApiDependencies.invoker = Invoker(services)
|
||||
db.clean()
|
||||
|
||||
|
invokeai/app/api/routers/download_queue.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
# Copyright (c) 2023 Lincoln D. Stein
|
||||
"""FastAPI route for the download queue."""
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import Body, Path, Response
|
||||
from fastapi.routing import APIRouter
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
from starlette.exceptions import HTTPException
|
||||
|
||||
from invokeai.app.services.download import (
|
||||
DownloadJob,
|
||||
UnknownJobIDException,
|
||||
)
|
||||
|
||||
from ..dependencies import ApiDependencies
|
||||
|
||||
download_queue_router = APIRouter(prefix="/v1/download_queue", tags=["download_queue"])
|
||||
|
||||
|
||||
@download_queue_router.get(
|
||||
"/",
|
||||
operation_id="list_downloads",
|
||||
)
|
||||
async def list_downloads() -> List[DownloadJob]:
|
||||
"""Get a list of active and inactive jobs."""
|
||||
queue = ApiDependencies.invoker.services.download_queue
|
||||
return queue.list_jobs()
|
||||
|
||||
|
||||
@download_queue_router.patch(
|
||||
"/",
|
||||
operation_id="prune_downloads",
|
||||
responses={
|
||||
204: {"description": "All completed jobs have been pruned"},
|
||||
400: {"description": "Bad request"},
|
||||
},
|
||||
)
|
||||
async def prune_downloads():
|
||||
"""Prune completed and errored jobs."""
|
||||
queue = ApiDependencies.invoker.services.download_queue
|
||||
queue.prune_jobs()
|
||||
return Response(status_code=204)
|
||||
|
||||
|
||||
@download_queue_router.post(
|
||||
"/i/",
|
||||
operation_id="download",
|
||||
)
|
||||
async def download(
|
||||
source: AnyHttpUrl = Body(description="download source"),
|
||||
dest: str = Body(description="download destination"),
|
||||
priority: int = Body(default=10, description="queue priority"),
|
||||
access_token: Optional[str] = Body(default=None, description="token for authorization to download"),
|
||||
) -> DownloadJob:
|
||||
"""Download the source URL to the file or directory indicted in dest."""
|
||||
queue = ApiDependencies.invoker.services.download_queue
|
||||
return queue.download(source, dest, priority, access_token)
|
||||
|
||||
|
||||
@download_queue_router.get(
|
||||
"/i/{id}",
|
||||
operation_id="get_download_job",
|
||||
responses={
|
||||
200: {"description": "Success"},
|
||||
404: {"description": "The requested download JobID could not be found"},
|
||||
},
|
||||
)
|
||||
async def get_download_job(
|
||||
id: int = Path(description="ID of the download job to fetch."),
|
||||
) -> DownloadJob:
|
||||
"""Get a download job using its ID."""
|
||||
try:
|
||||
job = ApiDependencies.invoker.services.download_queue.id_to_job(id)
|
||||
return job
|
||||
except UnknownJobIDException as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
|
||||
|
||||
@download_queue_router.delete(
|
||||
"/i/{id}",
|
||||
operation_id="cancel_download_job",
|
||||
responses={
|
||||
204: {"description": "Job has been cancelled"},
|
||||
404: {"description": "The requested download JobID could not be found"},
|
||||
},
|
||||
)
|
||||
async def cancel_download_job(
|
||||
id: int = Path(description="ID of the download job to cancel."),
|
||||
):
|
||||
"""Cancel a download job using its ID."""
|
||||
try:
|
||||
queue = ApiDependencies.invoker.services.download_queue
|
||||
job = queue.id_to_job(id)
|
||||
queue.cancel_job(job)
|
||||
return Response(status_code=204)
|
||||
except UnknownJobIDException as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
|
||||
|
||||
@download_queue_router.delete(
|
||||
"/i",
|
||||
operation_id="cancel_all_download_jobs",
|
||||
responses={
|
||||
204: {"description": "Download jobs have been cancelled"},
|
||||
},
|
||||
)
|
||||
async def cancel_all_download_jobs():
|
||||
"""Cancel all download jobs."""
|
||||
ApiDependencies.invoker.services.download_queue.cancel_all_jobs()
|
||||
return Response(status_code=204)
|
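A hedged sketch of driving these endpoints from a Python client with `requests`,
assuming the default local server address of `http://localhost:9090` (the model URL
and destination path are placeholders; FastAPI packs the multiple `Body()` parameters
of `download()` into a single JSON object):

```python
import requests

BASE = "http://localhost:9090/api/v1/download_queue"

# enqueue a download
job = requests.post(f"{BASE}/i/", json={
    "source": "https://example.com/model.safetensors",
    "dest": "/tmp/downloads",
    "priority": 10,
}).json()

# inspect it, cancel it, then prune and list the queue
print(requests.get(f"{BASE}/i/{job['id']}").json()["status"])
requests.delete(f"{BASE}/i/{job['id']}")   # 204 on success, 404 for an unknown ID
requests.patch(f"{BASE}/")                 # prune completed/errored jobs
print(requests.get(f"{BASE}/").json())
```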
@ -45,6 +45,7 @@ if True: # hack to make flake8 happy with imports coming after setting up the c
|
||||
app_info,
|
||||
board_images,
|
||||
boards,
|
||||
download_queue,
|
||||
images,
|
||||
model_records,
|
||||
models,
|
||||
@ -116,6 +117,7 @@ app.include_router(sessions.session_router, prefix="/api")
|
||||
app.include_router(utilities.utilities_router, prefix="/api")
|
||||
app.include_router(models.models_router, prefix="/api")
|
||||
app.include_router(model_records.model_records_router, prefix="/api")
|
||||
app.include_router(download_queue.download_queue_router, prefix="/api")
|
||||
app.include_router(images.images_router, prefix="/api")
|
||||
app.include_router(boards.boards_router, prefix="/api")
|
||||
app.include_router(board_images.board_images_router, prefix="/api")
|
||||
|
@ -1,4 +1,3 @@
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional, Union
|
||||
|
||||
@ -17,6 +16,7 @@ from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
|
||||
from ...backend.model_management.lora import ModelPatcher
|
||||
from ...backend.model_management.models import ModelNotFoundException, ModelType
|
||||
from ...backend.util.devices import torch_dtype
|
||||
from ..util.ti_utils import extract_ti_triggers_from_prompt
|
||||
from .baseinvocation import (
|
||||
BaseInvocation,
|
||||
BaseInvocationOutput,
|
||||
@ -87,7 +87,7 @@ class CompelInvocation(BaseInvocation):
|
||||
# loras = [(context.services.model_manager.get_model(**lora.dict(exclude={"weight"})).context.model, lora.weight) for lora in self.clip.loras]
|
||||
|
||||
ti_list = []
|
||||
for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", self.prompt):
|
||||
for trigger in extract_ti_triggers_from_prompt(self.prompt):
|
||||
name = trigger[1:-1]
|
||||
try:
|
||||
ti_list.append(
|
||||
@ -210,7 +210,7 @@ class SDXLPromptInvocationBase:
|
||||
# loras = [(context.services.model_manager.get_model(**lora.dict(exclude={"weight"})).context.model, lora.weight) for lora in self.clip.loras]
|
||||
|
||||
ti_list = []
|
||||
for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", prompt):
|
||||
for trigger in extract_ti_triggers_from_prompt(prompt):
|
||||
name = trigger[1:-1]
|
||||
try:
|
||||
ti_list.append(
|
||||
|
@ -1,7 +1,6 @@
|
||||
# Copyright (c) 2023 Borisov Sergey (https://github.com/StAlKeR7779)
|
||||
|
||||
import inspect
|
||||
import re
|
||||
|
||||
# from contextlib import ExitStack
|
||||
from typing import List, Literal, Union
|
||||
@ -21,6 +20,7 @@ from invokeai.backend import BaseModelType, ModelType, SubModelType
|
||||
from ...backend.model_management import ONNXModelPatcher
|
||||
from ...backend.stable_diffusion import PipelineIntermediateState
|
||||
from ...backend.util import choose_torch_device
|
||||
from ..util.ti_utils import extract_ti_triggers_from_prompt
|
||||
from .baseinvocation import (
|
||||
BaseInvocation,
|
||||
BaseInvocationOutput,
|
||||
@ -78,7 +78,7 @@ class ONNXPromptInvocation(BaseInvocation):
|
||||
]
|
||||
|
||||
ti_list = []
|
||||
for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", self.prompt):
|
||||
for trigger in extract_ti_triggers_from_prompt(self.prompt):
|
||||
name = trigger[1:-1]
|
||||
try:
|
||||
ti_list.append(
|
||||
|
@ -356,7 +356,7 @@ class InvokeAIAppConfig(InvokeAISettings):
|
||||
else:
|
||||
root = self.find_root().expanduser().absolute()
|
||||
self.root = root # insulate ourselves from relative paths that may change
|
||||
return root
|
||||
return root.resolve()
|
||||
|
||||
@property
|
||||
def root_dir(self) -> Path:
|
||||
|
invokeai/app/services/download/__init__.py (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
"""Init file for download queue."""
|
||||
from .download_base import DownloadJob, DownloadJobStatus, DownloadQueueServiceBase, UnknownJobIDException
|
||||
from .download_default import DownloadQueueService, TqdmProgress
|
||||
|
||||
__all__ = [
|
||||
"DownloadJob",
|
||||
"DownloadQueueServiceBase",
|
||||
"DownloadQueueService",
|
||||
"TqdmProgress",
|
||||
"DownloadJobStatus",
|
||||
"UnknownJobIDException",
|
||||
]
|
invokeai/app/services/download/download_base.py (new file, 217 lines)
@@ -0,0 +1,217 @@
|
||||
# Copyright (c) 2023 Lincoln D. Stein and the InvokeAI Development Team
|
||||
"""Model download service."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from enum import Enum
|
||||
from functools import total_ordering
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field, PrivateAttr
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
|
||||
|
||||
class DownloadJobStatus(str, Enum):
|
||||
"""State of a download job."""
|
||||
|
||||
WAITING = "waiting" # not enqueued, will not run
|
||||
RUNNING = "running" # actively downloading
|
||||
COMPLETED = "completed" # finished running
|
||||
CANCELLED = "cancelled" # user cancelled
|
||||
ERROR = "error" # terminated with an error message
|
||||
|
||||
|
||||
class DownloadJobCancelledException(Exception):
|
||||
"""This exception is raised when a download job is cancelled."""
|
||||
|
||||
|
||||
class UnknownJobIDException(Exception):
|
||||
"""This exception is raised when an invalid job id is referened."""
|
||||
|
||||
|
||||
class ServiceInactiveException(Exception):
|
||||
"""This exception is raised when user attempts to initiate a download before the service is started."""
|
||||
|
||||
|
||||
DownloadEventHandler = Callable[["DownloadJob"], None]
|
||||
|
||||
|
||||
@total_ordering
|
||||
class DownloadJob(BaseModel):
|
||||
"""Class to monitor and control a model download request."""
|
||||
|
||||
# required variables to be passed in on creation
|
||||
source: AnyHttpUrl = Field(description="Where to download from. Specific types specified in child classes.")
|
||||
dest: Path = Field(description="Destination of downloaded model on local disk; a directory or file path")
|
||||
access_token: Optional[str] = Field(default=None, description="authorization token for protected resources")
|
||||
# automatically assigned on creation
|
||||
id: int = Field(description="Numeric ID of this job", default=-1) # default id is a sentinel
|
||||
priority: int = Field(default=10, description="Queue priority; lower values are higher priority")
|
||||
|
||||
# set internally during download process
|
||||
status: DownloadJobStatus = Field(default=DownloadJobStatus.WAITING, description="Status of the download")
|
||||
download_path: Optional[Path] = Field(default=None, description="Final location of downloaded file")
|
||||
job_started: Optional[str] = Field(default=None, description="Timestamp for when the download job started")
|
||||
job_ended: Optional[str] = Field(
|
||||
default=None, description="Timestamp for when the download job ende1d (completed or errored)"
|
||||
)
|
||||
bytes: int = Field(default=0, description="Bytes downloaded so far")
|
||||
total_bytes: int = Field(default=0, description="Total file size (bytes)")
|
||||
|
||||
# set when an error occurs
|
||||
error_type: Optional[str] = Field(default=None, description="Name of exception that caused an error")
|
||||
error: Optional[str] = Field(default=None, description="Traceback of the exception that caused an error")
|
||||
|
||||
# internal flag
|
||||
_cancelled: bool = PrivateAttr(default=False)
|
||||
|
||||
# optional event handlers passed in on creation
|
||||
_on_start: Optional[DownloadEventHandler] = PrivateAttr(default=None)
|
||||
_on_progress: Optional[DownloadEventHandler] = PrivateAttr(default=None)
|
||||
_on_complete: Optional[DownloadEventHandler] = PrivateAttr(default=None)
|
||||
_on_cancelled: Optional[DownloadEventHandler] = PrivateAttr(default=None)
|
||||
_on_error: Optional[DownloadEventHandler] = PrivateAttr(default=None)
|
||||
|
||||
def __le__(self, other: "DownloadJob") -> bool:
|
||||
"""Return True if this job's priority is less than another's."""
|
||||
return self.priority <= other.priority
|
||||
|
||||
def cancel(self) -> None:
|
||||
"""Call to cancel the job."""
|
||||
self._cancelled = True
|
||||
|
||||
# cancelled and the callbacks are private attributes in order to prevent
|
||||
# them from being serialized and/or used in the Json Schema
|
||||
@property
|
||||
def cancelled(self) -> bool:
|
||||
"""Call to cancel the job."""
|
||||
return self._cancelled
|
||||
|
||||
@property
|
||||
def on_start(self) -> Optional[DownloadEventHandler]:
|
||||
"""Return the on_start event handler."""
|
||||
return self._on_start
|
||||
|
||||
@property
|
||||
def on_progress(self) -> Optional[DownloadEventHandler]:
|
||||
"""Return the on_progress event handler."""
|
||||
return self._on_progress
|
||||
|
||||
@property
|
||||
def on_complete(self) -> Optional[DownloadEventHandler]:
|
||||
"""Return the on_complete event handler."""
|
||||
return self._on_complete
|
||||
|
||||
@property
|
||||
def on_error(self) -> Optional[DownloadEventHandler]:
|
||||
"""Return the on_error event handler."""
|
||||
return self._on_error
|
||||
|
||||
@property
|
||||
def on_cancelled(self) -> Optional[DownloadEventHandler]:
|
||||
"""Return the on_cancelled event handler."""
|
||||
return self._on_cancelled
|
||||
|
||||
def set_callbacks(
|
||||
self,
|
||||
on_start: Optional[DownloadEventHandler] = None,
|
||||
on_progress: Optional[DownloadEventHandler] = None,
|
||||
on_complete: Optional[DownloadEventHandler] = None,
|
||||
on_cancelled: Optional[DownloadEventHandler] = None,
|
||||
on_error: Optional[DownloadEventHandler] = None,
|
||||
) -> None:
|
||||
"""Set the callbacks for download events."""
|
||||
self._on_start = on_start
|
||||
self._on_progress = on_progress
|
||||
self._on_complete = on_complete
|
||||
self._on_error = on_error
|
||||
self._on_cancelled = on_cancelled
|
||||
|
||||
|
||||
class DownloadQueueServiceBase(ABC):
|
||||
"""Multithreaded queue for downloading models via URL."""
|
||||
|
||||
@abstractmethod
|
||||
def start(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Start the download worker threads."""
|
||||
|
||||
@abstractmethod
|
||||
def stop(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Stop the download worker threads."""
|
||||
|
||||
@abstractmethod
|
||||
def download(
|
||||
self,
|
||||
source: AnyHttpUrl,
|
||||
dest: Path,
|
||||
priority: int = 10,
|
||||
access_token: Optional[str] = None,
|
||||
on_start: Optional[DownloadEventHandler] = None,
|
||||
on_progress: Optional[DownloadEventHandler] = None,
|
||||
on_complete: Optional[DownloadEventHandler] = None,
|
||||
on_cancelled: Optional[DownloadEventHandler] = None,
|
||||
on_error: Optional[DownloadEventHandler] = None,
|
||||
) -> DownloadJob:
|
||||
"""
|
||||
Create a download job.
|
||||
|
||||
:param source: Source of the download as a URL.
|
||||
:param dest: Path to download to. See below.
|
||||
:param on_start, on_progress, on_complete, on_error: Callbacks for the indicated
|
||||
events.
|
||||
:returns: A DownloadJob object for monitoring the state of the download.
|
||||
|
||||
The `dest` argument is a Path object. Its behavior is:
|
||||
|
||||
1. If the path exists and is a directory, then the URL contents will be downloaded
|
||||
into that directory using the filename indicated in the response's `Content-Disposition` field.
|
||||
If no content-disposition is present, then the last component of the URL will be used (similar to
|
||||
wget's behavior).
|
||||
2. If the path does not exist, then it is taken as the name of a new file to create with the downloaded
|
||||
content.
|
||||
3. If the path exists and is an existing file, then the downloader will try to resume the download from
|
||||
the end of the existing file.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def list_jobs(self) -> List[DownloadJob]:
|
||||
"""
|
||||
List active download jobs.
|
||||
|
||||
:returns List[DownloadJob]: List of download jobs whose state is not "completed."
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def id_to_job(self, id: int) -> DownloadJob:
|
||||
"""
|
||||
Return the DownloadJob corresponding to the integer ID.
|
||||
|
||||
:param id: ID of the DownloadJob.
|
||||
|
||||
Exceptions:
|
||||
* UnknownJobIDException
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def cancel_all_jobs(self):
|
||||
"""Cancel all active and enquedjobs."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def prune_jobs(self):
|
||||
"""Prune completed and errored queue items from the job list."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def cancel_job(self, job: DownloadJob):
|
||||
"""Cancel the job, clearing partial downloads and putting it into ERROR state."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def join(self):
|
||||
"""Wait until all jobs are off the queue."""
|
||||
pass
|
invokeai/app/services/download/download_default.py (new file, 418 lines)
@@ -0,0 +1,418 @@
|
||||
# Copyright (c) 2023, Lincoln D. Stein
|
||||
"""Implementation of multithreaded download queue for invokeai."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import threading
|
||||
import traceback
|
||||
from logging import Logger
|
||||
from pathlib import Path
|
||||
from queue import Empty, PriorityQueue
|
||||
from typing import Any, Dict, List, Optional, Set
|
||||
|
||||
import requests
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
from requests import HTTPError
|
||||
from tqdm import tqdm
|
||||
|
||||
from invokeai.app.services.events.events_base import EventServiceBase
|
||||
from invokeai.app.util.misc import get_iso_timestamp
|
||||
from invokeai.backend.util.logging import InvokeAILogger
|
||||
|
||||
from .download_base import (
|
||||
DownloadEventHandler,
|
||||
DownloadJob,
|
||||
DownloadJobCancelledException,
|
||||
DownloadJobStatus,
|
||||
DownloadQueueServiceBase,
|
||||
ServiceInactiveException,
|
||||
UnknownJobIDException,
|
||||
)
|
||||
|
||||
# Maximum number of bytes to download during each call to requests.iter_content()
|
||||
DOWNLOAD_CHUNK_SIZE = 100000
|
||||
|
||||
|
||||
class DownloadQueueService(DownloadQueueServiceBase):
|
||||
"""Class for queued download of models."""
|
||||
|
||||
_jobs: Dict[int, DownloadJob]
|
||||
_max_parallel_dl: int = 5
|
||||
_worker_pool: Set[threading.Thread]
|
||||
_queue: PriorityQueue[DownloadJob]
|
||||
_stop_event: threading.Event
|
||||
_lock: threading.Lock
|
||||
_logger: Logger
|
||||
_events: Optional[EventServiceBase] = None
|
||||
_next_job_id: int = 0
|
||||
_accept_download_requests: bool = False
|
||||
_requests: requests.sessions.Session
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
max_parallel_dl: int = 5,
|
||||
event_bus: Optional[EventServiceBase] = None,
|
||||
requests_session: Optional[requests.sessions.Session] = None,
|
||||
):
|
||||
"""
|
||||
Initialize DownloadQueue.
|
||||
|
||||
:param max_parallel_dl: Number of simultaneous downloads allowed [5].
|
||||
:param requests_session: Optional requests.sessions.Session object, for unit tests.
|
||||
"""
|
||||
self._jobs = {}
|
||||
self._next_job_id = 0
|
||||
self._queue = PriorityQueue()
|
||||
self._stop_event = threading.Event()
|
||||
self._worker_pool = set()
|
||||
self._lock = threading.Lock()
|
||||
self._logger = InvokeAILogger.get_logger("DownloadQueueService")
|
||||
self._event_bus = event_bus
|
||||
self._requests = requests_session or requests.Session()
|
||||
self._accept_download_requests = False
|
||||
self._max_parallel_dl = max_parallel_dl
|
||||
|
||||
def start(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Start the download worker threads."""
|
||||
with self._lock:
|
||||
if self._worker_pool:
|
||||
raise Exception("Attempt to start the download service twice")
|
||||
self._stop_event.clear()
|
||||
self._start_workers(self._max_parallel_dl)
|
||||
self._accept_download_requests = True
|
||||
|
||||
def stop(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Stop the download worker threads."""
|
||||
with self._lock:
|
||||
if not self._worker_pool:
|
||||
raise Exception("Attempt to stop the download service before it was started")
|
||||
self._accept_download_requests = False # reject attempts to add new jobs to queue
|
||||
queued_jobs = [x for x in self.list_jobs() if x.status == DownloadJobStatus.WAITING]
|
||||
active_jobs = [x for x in self.list_jobs() if x.status == DownloadJobStatus.RUNNING]
|
||||
if queued_jobs:
|
||||
self._logger.warning(f"Cancelling {len(queued_jobs)} queued downloads")
|
||||
if active_jobs:
|
||||
self._logger.info(f"Waiting for {len(active_jobs)} active download jobs to complete")
|
||||
with self._queue.mutex:
|
||||
self._queue.queue.clear()
|
||||
self.join() # wait for all active jobs to finish
|
||||
self._stop_event.set()
|
||||
self._worker_pool.clear()
|
||||
|
||||
def download(
|
||||
self,
|
||||
source: AnyHttpUrl,
|
||||
dest: Path,
|
||||
priority: int = 10,
|
||||
access_token: Optional[str] = None,
|
||||
on_start: Optional[DownloadEventHandler] = None,
|
||||
on_progress: Optional[DownloadEventHandler] = None,
|
||||
on_complete: Optional[DownloadEventHandler] = None,
|
||||
on_cancelled: Optional[DownloadEventHandler] = None,
|
||||
on_error: Optional[DownloadEventHandler] = None,
|
||||
) -> DownloadJob:
|
||||
"""Create a download job and return its ID."""
|
||||
if not self._accept_download_requests:
|
||||
raise ServiceInactiveException(
|
||||
"The download service is not currently accepting requests. Please call start() to initialize the service."
|
||||
)
|
||||
with self._lock:
|
||||
id = self._next_job_id
|
||||
self._next_job_id += 1
|
||||
job = DownloadJob(
|
||||
id=id,
|
||||
source=source,
|
||||
dest=dest,
|
||||
priority=priority,
|
||||
access_token=access_token,
|
||||
)
|
||||
job.set_callbacks(
|
||||
on_start=on_start,
|
||||
on_progress=on_progress,
|
||||
on_complete=on_complete,
|
||||
on_cancelled=on_cancelled,
|
||||
on_error=on_error,
|
||||
)
|
||||
self._jobs[id] = job
|
||||
self._queue.put(job)
|
||||
return job
|
||||
|
||||
def join(self) -> None:
|
||||
"""Wait for all jobs to complete."""
|
||||
self._queue.join()
|
||||
|
||||
def list_jobs(self) -> List[DownloadJob]:
|
||||
"""List all the jobs."""
|
||||
return list(self._jobs.values())
|
||||
|
||||
def prune_jobs(self) -> None:
|
||||
"""Prune completed and errored queue items from the job list."""
|
||||
with self._lock:
|
||||
to_delete = set()
|
||||
for job_id, job in self._jobs.items():
|
||||
if self._in_terminal_state(job):
|
||||
to_delete.add(job_id)
|
||||
for job_id in to_delete:
|
||||
del self._jobs[job_id]
|
||||
|
||||
def id_to_job(self, id: int) -> DownloadJob:
|
||||
"""Translate a job ID into a DownloadJob object."""
|
||||
try:
|
||||
return self._jobs[id]
|
||||
except KeyError as excp:
|
||||
raise UnknownJobIDException("Unrecognized job") from excp
|
||||
|
||||
def cancel_job(self, job: DownloadJob) -> None:
|
||||
"""
|
||||
Cancel the indicated job.
|
||||
|
||||
If it is running it will be stopped.
|
||||
job.status will be set to DownloadJobStatus.CANCELLED
|
||||
"""
|
||||
with self._lock:
|
||||
job.cancel()
|
||||
|
||||
def cancel_all_jobs(self, preserve_partial: bool = False) -> None:
|
||||
"""Cancel all jobs (those not in enqueued, running or paused state)."""
|
||||
for job in self._jobs.values():
|
||||
if not self._in_terminal_state(job):
|
||||
self.cancel_job(job)
|
||||
|
||||
def _in_terminal_state(self, job: DownloadJob) -> bool:
|
||||
return job.status in [
|
||||
DownloadJobStatus.COMPLETED,
|
||||
DownloadJobStatus.CANCELLED,
|
||||
DownloadJobStatus.ERROR,
|
||||
]
|
||||
|
||||
def _start_workers(self, max_workers: int) -> None:
|
||||
"""Start the requested number of worker threads."""
|
||||
self._stop_event.clear()
|
||||
for i in range(0, max_workers): # noqa B007
|
||||
worker = threading.Thread(target=self._download_next_item, daemon=True)
|
||||
self._logger.debug(f"Download queue worker thread {worker.name} starting.")
|
||||
worker.start()
|
||||
self._worker_pool.add(worker)
|
||||
|
||||
def _download_next_item(self) -> None:
|
||||
"""Worker thread gets next job on priority queue."""
|
||||
done = False
|
||||
while not done:
|
||||
if self._stop_event.is_set():
|
||||
done = True
|
||||
continue
|
||||
try:
|
||||
job = self._queue.get(timeout=1)
|
||||
except Empty:
|
||||
continue
|
||||
|
||||
try:
|
||||
job.job_started = get_iso_timestamp()
|
||||
self._do_download(job)
|
||||
self._signal_job_complete(job)
|
||||
|
||||
except (OSError, HTTPError) as excp:
|
||||
job.error_type = excp.__class__.__name__ + f"({str(excp)})"
|
||||
job.error = traceback.format_exc()
|
||||
self._signal_job_error(job)
|
||||
except DownloadJobCancelledException:
|
||||
self._signal_job_cancelled(job)
|
||||
self._cleanup_cancelled_job(job)
|
||||
|
||||
finally:
|
||||
job.job_ended = get_iso_timestamp()
|
||||
self._queue.task_done()
|
||||
self._logger.debug(f"Download queue worker thread {threading.current_thread().name} exiting.")
|
||||
|
||||
def _do_download(self, job: DownloadJob) -> None:
|
||||
"""Do the actual download."""
|
||||
url = job.source
|
||||
header = {"Authorization": f"Bearer {job.access_token}"} if job.access_token else {}
|
||||
open_mode = "wb"
|
||||
|
||||
# Make a streaming request. This will retrieve headers including
|
||||
# content-length and content-disposition, but not fetch any content itself
|
||||
resp = self._requests.get(str(url), headers=header, stream=True)
|
||||
if not resp.ok:
|
||||
raise HTTPError(resp.reason)
|
||||
content_length = int(resp.headers.get("content-length", 0))
|
||||
job.total_bytes = content_length
|
||||
|
||||
if job.dest.is_dir():
|
||||
file_name = os.path.basename(str(url.path)) # default is to use the last bit of the URL
|
||||
|
||||
if match := re.search('filename="(.+)"', resp.headers.get("Content-Disposition", "")):
|
||||
remote_name = match.group(1)
|
||||
if self._validate_filename(job.dest.as_posix(), remote_name):
|
||||
file_name = remote_name
|
||||
|
||||
job.download_path = job.dest / file_name
|
||||
|
||||
else:
|
||||
job.dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
job.download_path = job.dest
|
||||
|
||||
assert job.download_path
|
||||
|
||||
# Don't clobber an existing file. See commit 82c2c85202f88c6d24ff84710f297cfc6ae174af
|
||||
# for code that instead resumes an interrupted download.
|
||||
if job.download_path.exists():
|
||||
raise OSError(f"[Errno 17] File {job.download_path} exists")
|
||||
|
||||
# append ".downloading" to the path
|
||||
in_progress_path = self._in_progress_path(job.download_path)
|
||||
|
||||
# signal caller that the download is starting. At this point, key fields such as
|
||||
# download_path and total_bytes will be populated. We call it here because the code below might
|
||||
# discover that the local file is already complete and generate a COMPLETED status.
|
||||
self._signal_job_started(job)
|
||||
|
||||
# "range not satisfiable" - local file is at least as large as the remote file
|
||||
if resp.status_code == 416 or (content_length > 0 and job.bytes >= content_length):
|
||||
self._logger.warning(f"{job.download_path}: complete file found. Skipping.")
|
||||
return
|
||||
|
||||
# "partial content" - local file is smaller than remote file
|
||||
elif resp.status_code == 206 or job.bytes > 0:
|
||||
self._logger.warning(f"{job.download_path}: partial file found. Resuming")
|
||||
|
||||
# some other error
|
||||
elif resp.status_code != 200:
|
||||
raise HTTPError(resp.reason)
|
||||
|
||||
self._logger.debug(f"{job.source}: Downloading {job.download_path}")
|
||||
report_delta = job.total_bytes / 100 # report every 1% change
|
||||
last_report_bytes = 0
|
||||
|
||||
# DOWNLOAD LOOP
|
||||
with open(in_progress_path, open_mode) as file:
|
||||
for data in resp.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE):
|
||||
if job.cancelled:
|
||||
raise DownloadJobCancelledException("Job was cancelled at caller's request")
|
||||
|
||||
job.bytes += file.write(data)
|
||||
if (job.bytes - last_report_bytes >= report_delta) or (job.bytes >= job.total_bytes):
|
||||
last_report_bytes = job.bytes
|
||||
self._signal_job_progress(job)
|
||||
|
||||
# if we get here we are done and can rename the file to the original dest
|
||||
in_progress_path.rename(job.download_path)
|
||||
|
||||
def _validate_filename(self, directory: str, filename: str) -> bool:
|
||||
pc_name_max = os.pathconf(directory, "PC_NAME_MAX") if hasattr(os, "pathconf") else 260 # hardcoded for windows
|
||||
pc_path_max = (
|
||||
os.pathconf(directory, "PC_PATH_MAX") if hasattr(os, "pathconf") else 32767
|
||||
) # hardcoded for windows with long names enabled
|
||||
if "/" in filename:
|
||||
return False
|
||||
if filename.startswith(".."):
|
||||
return False
|
||||
if len(filename) > pc_name_max:
|
||||
return False
|
||||
if len(os.path.join(directory, filename)) > pc_path_max:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _in_progress_path(self, path: Path) -> Path:
|
||||
return path.with_name(path.name + ".downloading")
|
||||
|
||||
def _signal_job_started(self, job: DownloadJob) -> None:
|
||||
job.status = DownloadJobStatus.RUNNING
|
||||
if job.on_start:
|
||||
try:
|
||||
job.on_start(job)
|
||||
except Exception as e:
|
||||
self._logger.error(e)
|
||||
if self._event_bus:
|
||||
assert job.download_path
|
||||
self._event_bus.emit_download_started(str(job.source), job.download_path.as_posix())
|
||||
|
||||
def _signal_job_progress(self, job: DownloadJob) -> None:
|
||||
if job.on_progress:
|
||||
try:
|
||||
job.on_progress(job)
|
||||
except Exception as e:
|
||||
self._logger.error(e)
|
||||
if self._event_bus:
|
||||
assert job.download_path
|
||||
self._event_bus.emit_download_progress(
|
||||
str(job.source),
|
||||
download_path=job.download_path.as_posix(),
|
||||
current_bytes=job.bytes,
|
||||
total_bytes=job.total_bytes,
|
||||
)
|
||||
|
||||
def _signal_job_complete(self, job: DownloadJob) -> None:
|
||||
job.status = DownloadJobStatus.COMPLETED
|
||||
if job.on_complete:
|
||||
try:
|
||||
job.on_complete(job)
|
||||
except Exception as e:
|
||||
self._logger.error(e)
|
||||
if self._event_bus:
|
||||
assert job.download_path
|
||||
self._event_bus.emit_download_complete(
|
||||
str(job.source), download_path=job.download_path.as_posix(), total_bytes=job.total_bytes
|
||||
)
|
||||
|
||||
def _signal_job_cancelled(self, job: DownloadJob) -> None:
|
||||
job.status = DownloadJobStatus.CANCELLED
|
||||
if job.on_cancelled:
|
||||
try:
|
||||
job.on_cancelled(job)
|
||||
except Exception as e:
|
||||
self._logger.error(e)
|
||||
if self._event_bus:
|
||||
self._event_bus.emit_download_cancelled(str(job.source))
|
||||
|
||||
def _signal_job_error(self, job: DownloadJob) -> None:
|
||||
job.status = DownloadJobStatus.ERROR
|
||||
if job.on_error:
|
||||
try:
|
||||
job.on_error(job)
|
||||
except Exception as e:
|
||||
self._logger.error(e)
|
||||
if self._event_bus:
|
||||
assert job.error_type
|
||||
assert job.error
|
||||
self._event_bus.emit_download_error(str(job.source), error_type=job.error_type, error=job.error)
|
||||
|
||||
def _cleanup_cancelled_job(self, job: DownloadJob) -> None:
|
||||
self._logger.warning(f"Cleaning up leftover files from cancelled download job {job.download_path}")
|
||||
try:
|
||||
if job.download_path:
|
||||
partial_file = self._in_progress_path(job.download_path)
|
||||
partial_file.unlink()
|
||||
except OSError as excp:
|
||||
self._logger.warning(excp)
|
||||
|
||||
|
||||
# Example on_progress event handler to display a TQDM status bar
|
||||
# Activate with:
|
||||
# download_service.download('http://foo.bar/baz', '/tmp', on_progress=TqdmProgress().update)
|
||||
class TqdmProgress(object):
|
||||
"""TQDM-based progress bar object to use in on_progress handlers."""
|
||||
|
||||
_bars: Dict[int, tqdm] # the tqdm object
|
||||
_last: Dict[int, int] # last bytes downloaded
|
||||
|
||||
def __init__(self) -> None: # noqa D107
|
||||
self._bars = {}
|
||||
self._last = {}
|
||||
|
||||
def update(self, job: DownloadJob) -> None: # noqa D102
|
||||
job_id = job.id
|
||||
# new job
|
||||
if job_id not in self._bars:
|
||||
assert job.download_path
|
||||
dest = Path(job.download_path).name
|
||||
self._bars[job_id] = tqdm(
|
||||
desc=dest,
|
||||
initial=0,
|
||||
total=job.total_bytes,
|
||||
unit="iB",
|
||||
unit_scale=True,
|
||||
)
|
||||
self._last[job_id] = 0
|
||||
self._bars[job_id].update(job.bytes - self._last[job_id])
|
||||
self._last[job_id] = job.bytes
|
@ -17,6 +17,7 @@ from invokeai.backend.model_management.models.base import BaseModelType, ModelTy
|
||||
|
||||
class EventServiceBase:
|
||||
queue_event: str = "queue_event"
|
||||
download_event: str = "download_event"
|
||||
model_event: str = "model_event"
|
||||
|
||||
"""Basic event bus, to have an empty stand-in when not needed"""
|
||||
@ -32,6 +33,13 @@ class EventServiceBase:
|
||||
payload={"event": event_name, "data": payload},
|
||||
)
|
||||
|
||||
def __emit_download_event(self, event_name: str, payload: dict) -> None:
|
||||
payload["timestamp"] = get_timestamp()
|
||||
self.dispatch(
|
||||
event_name=EventServiceBase.download_event,
|
||||
payload={"event": event_name, "data": payload},
|
||||
)
|
||||
|
||||
def __emit_model_event(self, event_name: str, payload: dict) -> None:
|
||||
payload["timestamp"] = get_timestamp()
|
||||
self.dispatch(
|
||||
@ -323,6 +331,79 @@ class EventServiceBase:
|
||||
payload={"queue_id": queue_id},
|
||||
)
|
||||
|
||||
def emit_download_started(self, source: str, download_path: str) -> None:
|
||||
"""
|
||||
Emit when a download job is started.
|
||||
|
||||
:param source: The source URL of the download
:param download_path: The local path to which the file is being downloaded
|
||||
"""
|
||||
self.__emit_download_event(
|
||||
event_name="download_started",
|
||||
payload={"source": source, "download_path": download_path},
|
||||
)
|
||||
|
||||
def emit_download_progress(self, source: str, download_path: str, current_bytes: int, total_bytes: int) -> None:
|
||||
"""
|
||||
Emit "download_progress" events at regular intervals during a download job.
|
||||
|
||||
:param source: The downloaded source
|
||||
:param download_path: The local downloaded file
|
||||
:param current_bytes: Number of bytes downloaded so far
|
||||
:param total_bytes: The size of the file being downloaded (if known)
|
||||
"""
|
||||
self.__emit_download_event(
|
||||
event_name="download_progress",
|
||||
payload={
|
||||
"source": source,
|
||||
"download_path": download_path,
|
||||
"current_bytes": current_bytes,
|
||||
"total_bytes": total_bytes,
|
||||
},
|
||||
)
|
||||
|
||||
def emit_download_complete(self, source: str, download_path: str, total_bytes: int) -> None:
|
||||
"""
|
||||
Emit a "download_complete" event at the end of a successful download.
|
||||
|
||||
:param source: Source URL
|
||||
:param download_path: Path to the locally downloaded file
|
||||
:param total_bytes: The size of the downloaded file
|
||||
"""
|
||||
self.__emit_download_event(
|
||||
event_name="download_complete",
|
||||
payload={
|
||||
"source": source,
|
||||
"download_path": download_path,
|
||||
"total_bytes": total_bytes,
|
||||
},
|
||||
)
|
||||
|
||||
def emit_download_cancelled(self, source: str) -> None:
|
||||
"""Emit a "download_cancelled" event in the event that the download was cancelled by user."""
|
||||
self.__emit_download_event(
|
||||
event_name="download_cancelled",
|
||||
payload={
|
||||
"source": source,
|
||||
},
|
||||
)
|
||||
|
||||
def emit_download_error(self, source: str, error_type: str, error: str) -> None:
|
||||
"""
|
||||
Emit a "download_error" event when an download job encounters an exception.
|
||||
|
||||
:param source: Source URL
|
||||
:param error_type: The name of the exception that raised the error
|
||||
:param error: The traceback from this error
|
||||
"""
|
||||
self.__emit_download_event(
|
||||
event_name="download_error",
|
||||
payload={
|
||||
"source": source,
|
||||
"error_type": error_type,
|
||||
"error": error,
|
||||
},
|
||||
)
|
||||
|
||||
def emit_model_install_started(self, source: str) -> None:
|
||||
"""
|
||||
Emitted when an install job is started.
|
||||
|
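All of the new `emit_download_*` helpers funnel through `__emit_download_event`, which stamps a timestamp on the payload and dispatches it on the single `download_event` channel with the specific event name tucked inside. A hedged sketch of a consumer that fans those payloads back out; the subclass and its print statements are illustrative, but the payload shape matches the emitters above and the dummy event service used in the tests further down:

```python
from typing import Any

from invokeai.app.services.events.events_base import EventServiceBase


class PrintingEventService(EventServiceBase):
    """Illustrative subscriber: route download events dispatched by the helpers above."""

    def dispatch(self, event_name: str, payload: Any) -> None:
        # event_name is the channel ("download_event"); the concrete event is inside the payload
        if event_name != EventServiceBase.download_event:
            return
        event, data = payload["event"], payload["data"]
        if event == "download_progress":
            print(f"{data['source']}: {data['current_bytes']}/{data['total_bytes']} bytes")
        elif event == "download_complete":
            print(f"{data['source']} saved to {data['download_path']} ({data['total_bytes']} bytes)")
        elif event in ("download_error", "download_cancelled"):
            print(f"{data['source']}: {event}")
```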
@ -11,6 +11,7 @@ if TYPE_CHECKING:
from .board_records.board_records_base import BoardRecordStorageBase
from .boards.boards_base import BoardServiceABC
from .config import InvokeAIAppConfig
from .download import DownloadQueueServiceBase
from .events.events_base import EventServiceBase
from .image_files.image_files_base import ImageFileStorageBase
from .image_records.image_records_base import ImageRecordStorageBase
@ -27,7 +28,7 @@ if TYPE_CHECKING:
from .names.names_base import NameServiceBase
from .session_processor.session_processor_base import SessionProcessorBase
from .session_queue.session_queue_base import SessionQueueBase
from .shared.graph import GraphExecutionState, LibraryGraph
from .shared.graph import GraphExecutionState
from .urls.urls_base import UrlServiceBase
from .workflow_records.workflow_records_base import WorkflowRecordsStorageBase

@ -43,7 +44,6 @@ class InvocationServices:
configuration: "InvokeAIAppConfig"
events: "EventServiceBase"
graph_execution_manager: "ItemStorageABC[GraphExecutionState]"
graph_library: "ItemStorageABC[LibraryGraph]"
images: "ImageServiceABC"
image_records: "ImageRecordStorageBase"
image_files: "ImageFileStorageBase"
@ -51,6 +51,7 @@ class InvocationServices:
logger: "Logger"
model_manager: "ModelManagerServiceBase"
model_records: "ModelRecordServiceBase"
download_queue: "DownloadQueueServiceBase"
model_install: "ModelInstallServiceBase"
processor: "InvocationProcessorABC"
performance_statistics: "InvocationStatsServiceBase"
@ -71,7 +72,6 @@ class InvocationServices:
configuration: "InvokeAIAppConfig",
events: "EventServiceBase",
graph_execution_manager: "ItemStorageABC[GraphExecutionState]",
graph_library: "ItemStorageABC[LibraryGraph]",
images: "ImageServiceABC",
image_files: "ImageFileStorageBase",
image_records: "ImageRecordStorageBase",
@ -79,6 +79,7 @@ class InvocationServices:
logger: "Logger",
model_manager: "ModelManagerServiceBase",
model_records: "ModelRecordServiceBase",
download_queue: "DownloadQueueServiceBase",
model_install: "ModelInstallServiceBase",
processor: "InvocationProcessorABC",
performance_statistics: "InvocationStatsServiceBase",
@ -97,7 +98,6 @@ class InvocationServices:
self.configuration = configuration
self.events = events
self.graph_execution_manager = graph_execution_manager
self.graph_library = graph_library
self.images = images
self.image_files = image_files
self.image_records = image_records
@ -105,6 +105,7 @@ class InvocationServices:
self.logger = logger
self.model_manager = model_manager
self.model_records = model_records
self.download_queue = download_queue
self.model_install = model_install
self.processor = processor
self.performance_statistics = performance_statistics
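With `download_queue` and `model_install` wired into the service container, anything holding an `InvocationServices` instance can reach them directly. A small hypothetical helper, shown only to illustrate the access pattern; the function itself is not part of the diff, and the `download()` call mirrors the tests later in this commit:

```python
from pathlib import Path

from pydantic.networks import AnyHttpUrl


def queue_model_download(services: "InvocationServices", url: str, dest: Path) -> None:
    """Hypothetical helper: enqueue a download via the newly exposed download_queue service."""
    # "InvocationServices" is the container shown above, referenced here as a forward annotation
    job = services.download_queue.download(source=AnyHttpUrl(url), dest=dest)
    services.logger.info(f"queued download of {url} as job {job.id}")
```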
@ -11,7 +11,6 @@ from typing_extensions import Annotated

from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.events import EventServiceBase
from invokeai.app.services.invoker import Invoker
from invokeai.app.services.model_records import ModelRecordServiceBase
from invokeai.backend.model_manager import AnyModelConfig

@ -157,12 +156,12 @@ class ModelInstallServiceBase(ABC):
:param event_bus: InvokeAI event bus for reporting events to.
"""

def start(self, invoker: Invoker) -> None:
"""Call at InvokeAI startup time."""
self.sync_to_config()
@abstractmethod
def start(self, *args: Any, **kwarg: Any) -> None:
"""Start the installer service."""

@abstractmethod
def stop(self) -> None:
def stop(self, *args: Any, **kwarg: Any) -> None:
"""Stop the model install service. After this the object can be safely deleted."""

@property

@ -71,7 +71,6 @@ class ModelInstallService(ModelInstallServiceBase):
self._install_queue = Queue()
self._cached_model_paths = set()
self._models_installed = set()
self._start_installer_thread()

@property
def app_config(self) -> InvokeAIAppConfig: # noqa D102
@ -85,8 +84,13 @@ class ModelInstallService(ModelInstallServiceBase):
def event_bus(self) -> Optional[EventServiceBase]: # noqa D102
return self._event_bus

def stop(self, *args, **kwargs) -> None:
"""Stop the install thread; after this the object can be deleted and garbage collected."""
def start(self, *args: Any, **kwarg: Any) -> None:
"""Start the installer thread."""
self._start_installer_thread()
self.sync_to_config()

def stop(self, *args: Any, **kwarg: Any) -> None:
"""Stop the installer thread; after this the object can be deleted and garbage collected."""
self._install_queue.put(STOP_JOB)

def _start_installer_thread(self) -> None:
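The installer's lifecycle is now explicit: the constructor no longer starts the worker thread; `start()` launches it and then runs `sync_to_config()`, while `stop()` pushes the stop sentinel onto the install queue. A hedged usage sketch follows; the import path and the surrounding function are assumptions, while the constructor keywords follow the test fixture at the end of this commit:

```python
# Import path assumed; adjust to wherever ModelInstallService lives in your checkout.
from invokeai.app.services.model_install import ModelInstallService


def run_installer(app_config, record_store, event_bus=None) -> None:
    """Hedged sketch of the new start()/stop() lifecycle."""
    installer = ModelInstallService(app_config=app_config, record_store=record_store, event_bus=event_bus)
    installer.start()      # spins up the installer thread and syncs installed models to the config
    try:
        ...                # enqueue install jobs here
    finally:
        installer.stop()   # enqueues the stop sentinel; the object can then be garbage collected
```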
8	invokeai/app/util/ti_utils.py	Normal file
@ -0,0 +1,8 @@
import re


def extract_ti_triggers_from_prompt(prompt: str) -> list[str]:
    ti_triggers = []
    for trigger in re.findall(r"<[a-zA-Z0-9., _-]+>", prompt):
        ti_triggers.append(trigger)
    return ti_triggers
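A quick usage example for the new helper; the sample prompt is made up, but the output follows directly from the regular expression above:

```python
from invokeai.app.util.ti_utils import extract_ti_triggers_from_prompt

prompt = "a portrait of <easynegative>, highly detailed, <bad-hands-5>"
print(extract_ti_triggers_from_prompt(prompt))
# ['<easynegative>', '<bad-hands-5>']
```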
@ -28,7 +28,7 @@ def check_invokeai_root(config: InvokeAIAppConfig):
print("== STARTUP ABORTED ==")
print("** One or more necessary files is missing from your InvokeAI root directory **")
print("** Please rerun the configuration script to fix this problem. **")
print("** From the launcher, selection option [7]. **")
print("** From the launcher, selection option [6]. **")
print(
'** From the command line, activate the virtual environment and run "invokeai-configure --yes --skip-sd-weights" **'
)
@ -113,7 +113,8 @@
|
||||
"orderBy": "Ordinato per",
|
||||
"nextPage": "Pagina successiva",
|
||||
"saveAs": "Salva come",
|
||||
"unsaved": "Non salvato"
|
||||
"unsaved": "Non salvato",
|
||||
"direction": "Direzione"
|
||||
},
|
||||
"gallery": {
|
||||
"generations": "Generazioni",
|
||||
@ -1112,7 +1113,8 @@
|
||||
"betaDesc": "Questa invocazione è in versione beta. Fino a quando non sarà stabile, potrebbe subire modifiche importanti durante gli aggiornamenti dell'app. Abbiamo intenzione di supportare questa invocazione a lungo termine.",
|
||||
"newWorkflow": "Nuovo flusso di lavoro",
|
||||
"newWorkflowDesc": "Creare un nuovo flusso di lavoro?",
|
||||
"newWorkflowDesc2": "Il flusso di lavoro attuale presenta modifiche non salvate."
|
||||
"newWorkflowDesc2": "Il flusso di lavoro attuale presenta modifiche non salvate.",
|
||||
"unsupportedAnyOfLength": "unione di troppi elementi ({{count}})"
|
||||
},
|
||||
"boards": {
|
||||
"autoAddBoard": "Aggiungi automaticamente bacheca",
|
||||
|
@ -15,7 +15,7 @@
|
||||
"langBrPortuguese": "Português do Brasil",
|
||||
"langRussian": "Русский",
|
||||
"langSpanish": "Español",
|
||||
"nodes": "노드",
|
||||
"nodes": "Workflow Editor",
|
||||
"nodesDesc": "이미지 생성을 위한 노드 기반 시스템은 현재 개발 중입니다. 이 놀라운 기능에 대한 업데이트를 계속 지켜봐 주세요.",
|
||||
"postProcessing": "후처리",
|
||||
"postProcessDesc2": "보다 진보된 후처리 작업을 위한 전용 UI가 곧 출시될 예정입니다.",
|
||||
@ -25,7 +25,7 @@
|
||||
"trainingDesc2": "InvokeAI는 이미 메인 스크립트를 사용한 Textual Inversion를 이용한 Custom embedding 학습을 지원하고 있습니다.",
|
||||
"upload": "업로드",
|
||||
"close": "닫기",
|
||||
"load": "로드",
|
||||
"load": "불러오기",
|
||||
"back": "뒤로 가기",
|
||||
"statusConnected": "연결됨",
|
||||
"statusDisconnected": "연결 끊김",
|
||||
@ -58,7 +58,69 @@
|
||||
"statusGeneratingImageToImage": "이미지->이미지 생성",
|
||||
"statusProcessingComplete": "처리 완료",
|
||||
"statusIterationComplete": "반복(Iteration) 완료",
|
||||
"statusSavingImage": "이미지 저장"
|
||||
"statusSavingImage": "이미지 저장",
|
||||
"t2iAdapter": "T2I 어댑터",
|
||||
"communityLabel": "커뮤니티",
|
||||
"txt2img": "텍스트->이미지",
|
||||
"dontAskMeAgain": "다시 묻지 마세요",
|
||||
"loadingInvokeAI": "Invoke AI 불러오는 중",
|
||||
"checkpoint": "체크포인트",
|
||||
"format": "형식",
|
||||
"unknown": "알려지지 않음",
|
||||
"areYouSure": "확실하나요?",
|
||||
"folder": "폴더",
|
||||
"inpaint": "inpaint",
|
||||
"updated": "업데이트 됨",
|
||||
"on": "켜기",
|
||||
"save": "저장",
|
||||
"langPortuguese": "Português",
|
||||
"created": "생성됨",
|
||||
"nodeEditor": "Node Editor",
|
||||
"error": "에러",
|
||||
"prevPage": "이전 페이지",
|
||||
"ipAdapter": "IP 어댑터",
|
||||
"controlAdapter": "제어 어댑터",
|
||||
"installed": "설치됨",
|
||||
"accept": "수락",
|
||||
"ai": "인공지능",
|
||||
"auto": "자동",
|
||||
"file": "파일",
|
||||
"openInNewTab": "새 탭에서 열기",
|
||||
"delete": "삭제",
|
||||
"template": "템플릿",
|
||||
"cancel": "취소",
|
||||
"controlNet": "컨트롤넷",
|
||||
"outputs": "결과물",
|
||||
"unknownError": "알려지지 않은 에러",
|
||||
"statusProcessing": "처리 중",
|
||||
"linear": "선형",
|
||||
"imageFailedToLoad": "이미지를 로드할 수 없음",
|
||||
"direction": "방향",
|
||||
"data": "데이터",
|
||||
"somethingWentWrong": "뭔가 잘못됐어요",
|
||||
"imagePrompt": "이미지 프롬프트",
|
||||
"modelManager": "Model Manager",
|
||||
"lightMode": "라이트 모드",
|
||||
"safetensors": "Safetensors",
|
||||
"outpaint": "outpaint",
|
||||
"langKorean": "한국어",
|
||||
"orderBy": "정렬 기준",
|
||||
"generate": "생성",
|
||||
"copyError": "$t(gallery.copy) 에러",
|
||||
"learnMore": "더 알아보기",
|
||||
"nextPage": "다음 페이지",
|
||||
"saveAs": "다른 이름으로 저장",
|
||||
"darkMode": "다크 모드",
|
||||
"loading": "불러오는 중",
|
||||
"random": "랜덤",
|
||||
"langHebrew": "Hebrew",
|
||||
"batch": "Batch 매니저",
|
||||
"postprocessing": "후처리",
|
||||
"advanced": "고급",
|
||||
"unsaved": "저장되지 않음",
|
||||
"input": "입력",
|
||||
"details": "세부사항",
|
||||
"notInstalled": "설치되지 않음"
|
||||
},
|
||||
"gallery": {
|
||||
"showGenerations": "생성된 이미지 보기",
|
||||
@ -68,7 +130,35 @@
|
||||
"galleryImageSize": "이미지 크기",
|
||||
"galleryImageResetSize": "사이즈 리셋",
|
||||
"gallerySettings": "갤러리 설정",
|
||||
"maintainAspectRatio": "종횡비 유지"
|
||||
"maintainAspectRatio": "종횡비 유지",
|
||||
"deleteSelection": "선택 항목 삭제",
|
||||
"featuresWillReset": "이 이미지를 삭제하면 해당 기능이 즉시 재설정됩니다.",
|
||||
"deleteImageBin": "삭제된 이미지는 운영 체제의 Bin으로 전송됩니다.",
|
||||
"assets": "자산",
|
||||
"problemDeletingImagesDesc": "하나 이상의 이미지를 삭제할 수 없습니다",
|
||||
"noImagesInGallery": "보여줄 이미지가 없음",
|
||||
"autoSwitchNewImages": "새로운 이미지로 자동 전환",
|
||||
"loading": "불러오는 중",
|
||||
"unableToLoad": "갤러리를 로드할 수 없음",
|
||||
"preparingDownload": "다운로드 준비",
|
||||
"preparingDownloadFailed": "다운로드 준비 중 발생한 문제",
|
||||
"singleColumnLayout": "단일 열 레이아웃",
|
||||
"image": "이미지",
|
||||
"loadMore": "더 불러오기",
|
||||
"drop": "드랍",
|
||||
"problemDeletingImages": "이미지 삭제 중 발생한 문제",
|
||||
"downloadSelection": "선택 항목 다운로드",
|
||||
"deleteImage": "이미지 삭제",
|
||||
"currentlyInUse": "이 이미지는 현재 다음 기능에서 사용되고 있습니다:",
|
||||
"allImagesLoaded": "불러온 모든 이미지",
|
||||
"dropOrUpload": "$t(gallery.drop) 또는 업로드",
|
||||
"copy": "복사",
|
||||
"download": "다운로드",
|
||||
"deleteImagePermanent": "삭제된 이미지는 복원할 수 없습니다.",
|
||||
"noImageSelected": "선택된 이미지 없음",
|
||||
"autoAssignBoardOnClick": "클릭 시 Board로 자동 할당",
|
||||
"setCurrentImage": "현재 이미지로 설정",
|
||||
"dropToUpload": "업로드를 위해 $t(gallery.drop)"
|
||||
},
|
||||
"unifiedCanvas": {
|
||||
"betaPreserveMasked": "마스크 레이어 유지"
|
||||
@ -79,6 +169,752 @@
|
||||
"nextImage": "다음 이미지",
|
||||
"mode": "모드",
|
||||
"menu": "메뉴",
|
||||
"modelSelect": "모델 선택"
|
||||
"modelSelect": "모델 선택",
|
||||
"zoomIn": "확대하기",
|
||||
"rotateClockwise": "시계방향으로 회전",
|
||||
"uploadImage": "이미지 업로드",
|
||||
"showGalleryPanel": "갤러리 패널 표시",
|
||||
"useThisParameter": "해당 변수 사용",
|
||||
"reset": "리셋",
|
||||
"loadMore": "더 불러오기",
|
||||
"zoomOut": "축소하기",
|
||||
"rotateCounterClockwise": "반시계방향으로 회전",
|
||||
"showOptionsPanel": "사이드 패널 표시",
|
||||
"toggleAutoscroll": "자동 스크롤 전환",
|
||||
"toggleLogViewer": "Log Viewer 전환"
|
||||
},
|
||||
"modelManager": {
|
||||
"pathToCustomConfig": "사용자 지정 구성 경로",
|
||||
"importModels": "모델 가져오기",
|
||||
"availableModels": "사용 가능한 모델",
|
||||
"conversionNotSupported": "변환이 지원되지 않음",
|
||||
"noCustomLocationProvided": "사용자 지정 위치가 제공되지 않음",
|
||||
"onnxModels": "Onnx",
|
||||
"vaeRepoID": "VAE Repo ID",
|
||||
"modelExists": "모델 존재",
|
||||
"custom": "사용자 지정",
|
||||
"addModel": "모델 추가",
|
||||
"none": "없음",
|
||||
"modelConverted": "변환된 모델",
|
||||
"width": "너비",
|
||||
"weightedSum": "가중합",
|
||||
"inverseSigmoid": "Inverse Sigmoid",
|
||||
"invokeAIFolder": "Invoke AI 폴더",
|
||||
"syncModelsDesc": "모델이 백엔드와 동기화되지 않은 경우 이 옵션을 사용하여 새로 고침할 수 있습니다. 이는 일반적으로 응용 프로그램이 부팅된 후 수동으로 모델.yaml 파일을 업데이트하거나 InvokeAI root 폴더에 모델을 추가하는 경우에 유용합니다.",
|
||||
"convert": "변환",
|
||||
"vae": "VAE",
|
||||
"noModels": "모델을 찾을 수 없음",
|
||||
"statusConverting": "변환중",
|
||||
"sigmoid": "Sigmoid",
|
||||
"deleteModel": "모델 삭제",
|
||||
"modelLocation": "모델 위치",
|
||||
"merge": "병합",
|
||||
"v1": "v1",
|
||||
"description": "Description",
|
||||
"modelMergeInterpAddDifferenceHelp": "이 모드에서 모델 3은 먼저 모델 2에서 차감됩니다. 결과 버전은 위에 설정된 Alpha 비율로 모델 1과 혼합됩니다.",
|
||||
"customConfig": "사용자 지정 구성",
|
||||
"cannotUseSpaces": "공백을 사용할 수 없음",
|
||||
"formMessageDiffusersModelLocationDesc": "적어도 하나 이상 입력해 주세요.",
|
||||
"addDiffuserModel": "Diffusers 추가",
|
||||
"search": "검색",
|
||||
"predictionType": "예측 유형(안정 확산 2.x 모델 및 간혹 안정 확산 1.x 모델의 경우)",
|
||||
"widthValidationMsg": "모형의 기본 너비.",
|
||||
"selectAll": "모두 선택",
|
||||
"vaeLocation": "VAE 위치",
|
||||
"selectModel": "모델 선택",
|
||||
"modelAdded": "추가된 모델",
|
||||
"repo_id": "Repo ID",
|
||||
"modelSyncFailed": "모델 동기화 실패",
|
||||
"convertToDiffusersHelpText6": "이 모델을 변환하시겠습니까?",
|
||||
"config": "구성",
|
||||
"quickAdd": "빠른 추가",
|
||||
"selected": "선택된",
|
||||
"modelTwo": "모델 2",
|
||||
"simpleModelDesc": "로컬 Difffusers 모델, 로컬 체크포인트/안전 센서 모델 HuggingFace Repo ID 또는 체크포인트/Diffusers 모델 URL의 경로를 제공합니다.",
|
||||
"customSaveLocation": "사용자 정의 저장 위치",
|
||||
"advanced": "고급",
|
||||
"modelsFound": "발견된 모델",
|
||||
"load": "불러오기",
|
||||
"height": "높이",
|
||||
"modelDeleted": "삭제된 모델",
|
||||
"inpainting": "v1 Inpainting",
|
||||
"vaeLocationValidationMsg": "VAE가 있는 경로.",
|
||||
"convertToDiffusersHelpText2": "이 프로세스는 모델 관리자 항목을 동일한 모델의 Diffusers 버전으로 대체합니다.",
|
||||
"modelUpdateFailed": "모델 업데이트 실패",
|
||||
"modelUpdated": "업데이트된 모델",
|
||||
"noModelsFound": "모델을 찾을 수 없음",
|
||||
"useCustomConfig": "사용자 지정 구성 사용",
|
||||
"formMessageDiffusersVAELocationDesc": "제공되지 않은 경우 호출AIA 파일을 위의 모델 위치 내에서 VAE 파일을 찾습니다.",
|
||||
"formMessageDiffusersVAELocation": "VAE 위치",
|
||||
"checkpointModels": "Checkpoints",
|
||||
"modelOne": "모델 1",
|
||||
"settings": "설정",
|
||||
"heightValidationMsg": "모델의 기본 높이입니다.",
|
||||
"selectAndAdd": "아래 나열된 모델 선택 및 추가",
|
||||
"convertToDiffusersHelpText5": "디스크 공간이 충분한지 확인해 주세요. 모델은 일반적으로 2GB에서 7GB 사이로 다양합니다.",
|
||||
"deleteConfig": "구성 삭제",
|
||||
"deselectAll": "모두 선택 취소",
|
||||
"modelConversionFailed": "모델 변환 실패",
|
||||
"clearCheckpointFolder": "Checkpoint Folder 지우기",
|
||||
"modelEntryDeleted": "모델 항목 삭제",
|
||||
"deleteMsg1": "InvokeAI에서 이 모델을 삭제하시겠습니까?",
|
||||
"syncModels": "동기화 모델",
|
||||
"mergedModelSaveLocation": "위치 저장",
|
||||
"checkpointOrSafetensors": "$t(common.checkpoint) / $t(common.safetensors)",
|
||||
"modelType": "모델 유형",
|
||||
"nameValidationMsg": "모델 이름 입력",
|
||||
"cached": "cached",
|
||||
"modelsMerged": "병합된 모델",
|
||||
"formMessageDiffusersModelLocation": "Diffusers 모델 위치",
|
||||
"modelsMergeFailed": "모델 병합 실패",
|
||||
"convertingModelBegin": "모델 변환 중입니다. 잠시만 기다려 주십시오.",
|
||||
"v2_base": "v2 (512px)",
|
||||
"scanForModels": "모델 검색",
|
||||
"modelLocationValidationMsg": "Diffusers 모델이 저장된 로컬 폴더의 경로 제공",
|
||||
"name": "이름",
|
||||
"selectFolder": "폴더 선택",
|
||||
"updateModel": "모델 업데이트",
|
||||
"addNewModel": "새로운 모델 추가",
|
||||
"customConfigFileLocation": "사용자 지정 구성 파일 위치",
|
||||
"descriptionValidationMsg": "모델에 대한 description 추가",
|
||||
"safetensorModels": "SafeTensors",
|
||||
"convertToDiffusersHelpText1": "이 모델은 🧨 Diffusers 형식으로 변환됩니다.",
|
||||
"modelsSynced": "동기화된 모델",
|
||||
"vaePrecision": "VAE 정밀도",
|
||||
"invokeRoot": "InvokeAI 폴더",
|
||||
"checkpointFolder": "Checkpoint Folder",
|
||||
"mergedModelCustomSaveLocation": "사용자 지정 경로",
|
||||
"mergeModels": "모델 병합",
|
||||
"interpolationType": "Interpolation 타입",
|
||||
"modelMergeHeaderHelp2": "Diffusers만 병합이 가능합니다. 체크포인트 모델 병합을 원하신다면 먼저 Diffusers로 변환해주세요.",
|
||||
"convertToDiffusersSaveLocation": "위치 저장",
|
||||
"deleteMsg2": "모델이 InvokeAI root 폴더에 있으면 디스크에서 모델이 삭제됩니다. 사용자 지정 위치를 사용하는 경우 모델이 디스크에서 삭제되지 않습니다.",
|
||||
"oliveModels": "Olives",
|
||||
"repoIDValidationMsg": "모델의 온라인 저장소",
|
||||
"baseModel": "기본 모델",
|
||||
"scanAgain": "다시 검색",
|
||||
"pickModelType": "모델 유형 선택",
|
||||
"sameFolder": "같은 폴더",
|
||||
"addNew": "New 추가",
|
||||
"manual": "매뉴얼",
|
||||
"convertToDiffusersHelpText3": "디스크의 체크포인트 파일이 InvokeAI root 폴더에 있으면 삭제됩니다. 사용자 지정 위치에 있으면 삭제되지 않습니다.",
|
||||
"addCheckpointModel": "체크포인트 / 안전 센서 모델 추가",
|
||||
"configValidationMsg": "모델의 구성 파일에 대한 경로.",
|
||||
"modelManager": "모델 매니저",
|
||||
"variant": "Variant",
|
||||
"vaeRepoIDValidationMsg": "VAE의 온라인 저장소",
|
||||
"loraModels": "LoRAs",
|
||||
"modelDeleteFailed": "모델을 삭제하지 못했습니다",
|
||||
"convertToDiffusers": "Diffusers로 변환",
|
||||
"allModels": "모든 모델",
|
||||
"modelThree": "모델 3",
|
||||
"findModels": "모델 찾기",
|
||||
"notLoaded": "로드되지 않음",
|
||||
"alpha": "Alpha",
|
||||
"diffusersModels": "Diffusers",
|
||||
"modelMergeAlphaHelp": "Alpha는 모델의 혼합 강도를 제어합니다. Alpha 값이 낮을수록 두 번째 모델의 영향력이 줄어듭니다.",
|
||||
"addDifference": "Difference 추가",
|
||||
"noModelSelected": "선택한 모델 없음",
|
||||
"modelMergeHeaderHelp1": "최대 3개의 다른 모델을 병합하여 필요에 맞는 혼합물을 만들 수 있습니다.",
|
||||
"ignoreMismatch": "선택한 모델 간의 불일치 무시",
|
||||
"v2_768": "v2 (768px)",
|
||||
"convertToDiffusersHelpText4": "이것은 한 번의 과정일 뿐입니다. 컴퓨터 사양에 따라 30-60초 정도 소요될 수 있습니다.",
|
||||
"model": "모델",
|
||||
"addManually": "Manually 추가",
|
||||
"addSelected": "Selected 추가",
|
||||
"mergedModelName": "병합된 모델 이름",
|
||||
"delete": "삭제"
|
||||
},
|
||||
"controlnet": {
|
||||
"amult": "a_mult",
|
||||
"resize": "크기 조정",
|
||||
"showAdvanced": "고급 표시",
|
||||
"contentShuffleDescription": "이미지에서 content 섞기",
|
||||
"bgth": "bg_th",
|
||||
"addT2IAdapter": "$t(common.t2iAdapter) 추가",
|
||||
"pidi": "PIDI",
|
||||
"importImageFromCanvas": "캔버스에서 이미지 가져오기",
|
||||
"lineartDescription": "이미지->lineart 변환",
|
||||
"normalBae": "Normal BAE",
|
||||
"importMaskFromCanvas": "캔버스에서 Mask 가져오기",
|
||||
"hed": "HED",
|
||||
"contentShuffle": "Content Shuffle",
|
||||
"controlNetEnabledT2IDisabled": "$t(common.controlNet) 사용 가능, $t(common.t2iAdapter) 사용 불가능",
|
||||
"ipAdapterModel": "Adapter 모델",
|
||||
"resetControlImage": "Control Image 재설정",
|
||||
"beginEndStepPercent": "Begin / End Step Percentage",
|
||||
"mlsdDescription": "Minimalist Line Segment Detector",
|
||||
"duplicate": "복제",
|
||||
"balanced": "Balanced",
|
||||
"f": "F",
|
||||
"h": "H",
|
||||
"prompt": "프롬프트",
|
||||
"depthMidasDescription": "Midas를 사용하여 Depth map 생성하기",
|
||||
"openPoseDescription": "Openpose를 이용한 사람 포즈 추정",
|
||||
"control": "Control",
|
||||
"resizeMode": "크기 조정 모드",
|
||||
"t2iEnabledControlNetDisabled": "$t(common.t2iAdapter) 사용 가능,$t(common.controlNet) 사용 불가능",
|
||||
"coarse": "Coarse",
|
||||
"weight": "Weight",
|
||||
"selectModel": "모델 선택",
|
||||
"crop": "Crop",
|
||||
"depthMidas": "Depth (Midas)",
|
||||
"w": "W",
|
||||
"processor": "프로세서",
|
||||
"addControlNet": "$t(common.controlNet) 추가",
|
||||
"none": "해당없음",
|
||||
"incompatibleBaseModel": "호환되지 않는 기본 모델:",
|
||||
"enableControlnet": "사용 가능한 ControlNet",
|
||||
"detectResolution": "해상도 탐지",
|
||||
"controlNetT2IMutexDesc": "$t(common.controlNet)와 $t(common.t2iAdapter)는 현재 동시에 지원되지 않습니다.",
|
||||
"pidiDescription": "PIDI image 처리",
|
||||
"mediapipeFace": "Mediapipe Face",
|
||||
"mlsd": "M-LSD",
|
||||
"controlMode": "Control Mode",
|
||||
"fill": "채우기",
|
||||
"cannyDescription": "Canny 모서리 삭제",
|
||||
"addIPAdapter": "$t(common.ipAdapter) 추가",
|
||||
"lineart": "Lineart",
|
||||
"colorMapDescription": "이미지에서 color map을 생성합니다",
|
||||
"lineartAnimeDescription": "Anime-style lineart 처리",
|
||||
"minConfidence": "Min Confidence",
|
||||
"imageResolution": "이미지 해상도",
|
||||
"megaControl": "Mega Control",
|
||||
"depthZoe": "Depth (Zoe)",
|
||||
"colorMap": "색",
|
||||
"lowThreshold": "Low Threshold",
|
||||
"autoConfigure": "프로세서 자동 구성",
|
||||
"highThreshold": "High Threshold",
|
||||
"normalBaeDescription": "Normal BAE 처리",
|
||||
"noneDescription": "처리되지 않음",
|
||||
"saveControlImage": "Control Image 저장",
|
||||
"openPose": "Openpose",
|
||||
"toggleControlNet": "해당 ControlNet으로 전환",
|
||||
"delete": "삭제",
|
||||
"controlAdapter_other": "Control Adapter(s)",
|
||||
"safe": "Safe",
|
||||
"colorMapTileSize": "타일 크기",
|
||||
"lineartAnime": "Lineart Anime",
|
||||
"ipAdapterImageFallback": "IP Adapter Image가 선택되지 않음",
|
||||
"mediapipeFaceDescription": "Mediapipe를 사용하여 Face 탐지",
|
||||
"canny": "Canny",
|
||||
"depthZoeDescription": "Zoe를 사용하여 Depth map 생성하기",
|
||||
"hedDescription": "Holistically-Nested 모서리 탐지",
|
||||
"setControlImageDimensions": "Control Image Dimensions를 W/H로 설정",
|
||||
"scribble": "scribble",
|
||||
"resetIPAdapterImage": "IP Adapter Image 재설정",
|
||||
"handAndFace": "Hand and Face",
|
||||
"enableIPAdapter": "사용 가능한 IP Adapter",
|
||||
"maxFaces": "Max Faces"
|
||||
},
|
||||
"hotkeys": {
|
||||
"toggleGalleryPin": {
|
||||
"title": "Gallery Pin 전환",
|
||||
"desc": "갤러리를 UI에 고정했다가 풉니다"
|
||||
},
|
||||
"toggleSnap": {
|
||||
"desc": "Snap을 Grid로 전환",
|
||||
"title": "Snap 전환"
|
||||
},
|
||||
"setSeed": {
|
||||
"title": "시드 설정",
|
||||
"desc": "현재 이미지의 시드 사용"
|
||||
},
|
||||
"keyboardShortcuts": "키보드 바로 가기",
|
||||
"decreaseGalleryThumbSize": {
|
||||
"desc": "갤러리 미리 보기 크기 축소",
|
||||
"title": "갤러리 이미지 크기 축소"
|
||||
},
|
||||
"previousStagingImage": {
|
||||
"title": "이전 스테이징 이미지",
|
||||
"desc": "이전 스테이징 영역 이미지"
|
||||
},
|
||||
"decreaseBrushSize": {
|
||||
"title": "브러시 크기 줄이기",
|
||||
"desc": "캔버스 브러시/지우개 크기 감소"
|
||||
},
|
||||
"consoleToggle": {
|
||||
"desc": "콘솔 열고 닫기",
|
||||
"title": "콘솔 전환"
|
||||
},
|
||||
"selectBrush": {
|
||||
"desc": "캔버스 브러시를 선택",
|
||||
"title": "브러시 선택"
|
||||
},
|
||||
"upscale": {
|
||||
"desc": "현재 이미지를 업스케일",
|
||||
"title": "업스케일"
|
||||
},
|
||||
"previousImage": {
|
||||
"title": "이전 이미지",
|
||||
"desc": "갤러리에 이전 이미지 표시"
|
||||
},
|
||||
"unifiedCanvasHotkeys": "Unified Canvas Hotkeys",
|
||||
"toggleOptions": {
|
||||
"desc": "옵션 패널을 열고 닫기",
|
||||
"title": "옵션 전환"
|
||||
},
|
||||
"selectEraser": {
|
||||
"title": "지우개 선택",
|
||||
"desc": "캔버스 지우개를 선택"
|
||||
},
|
||||
"setPrompt": {
|
||||
"title": "프롬프트 설정",
|
||||
"desc": "현재 이미지의 프롬프트 사용"
|
||||
},
|
||||
"acceptStagingImage": {
|
||||
"desc": "현재 준비 영역 이미지 허용",
|
||||
"title": "준비 이미지 허용"
|
||||
},
|
||||
"resetView": {
|
||||
"desc": "Canvas View 초기화",
|
||||
"title": "View 초기화"
|
||||
},
|
||||
"hideMask": {
|
||||
"title": "Mask 숨김",
|
||||
"desc": "mask 숨김/숨김 해제"
|
||||
},
|
||||
"pinOptions": {
|
||||
"title": "옵션 고정",
|
||||
"desc": "옵션 패널을 고정"
|
||||
},
|
||||
"toggleGallery": {
|
||||
"desc": "gallery drawer 열기 및 닫기",
|
||||
"title": "Gallery 전환"
|
||||
},
|
||||
"quickToggleMove": {
|
||||
"title": "빠른 토글 이동",
|
||||
"desc": "일시적으로 이동 모드 전환"
|
||||
},
|
||||
"generalHotkeys": "General Hotkeys",
|
||||
"showHideBoundingBox": {
|
||||
"desc": "bounding box 표시 전환",
|
||||
"title": "Bounding box 표시/숨김"
|
||||
},
|
||||
"showInfo": {
|
||||
"desc": "현재 이미지의 metadata 정보 표시",
|
||||
"title": "정보 표시"
|
||||
},
|
||||
"copyToClipboard": {
|
||||
"title": "클립보드로 복사",
|
||||
"desc": "현재 캔버스를 클립보드로 복사"
|
||||
},
|
||||
"restoreFaces": {
|
||||
"title": "Faces 복원",
|
||||
"desc": "현재 이미지 복원"
|
||||
},
|
||||
"fillBoundingBox": {
|
||||
"title": "Bounding Box 채우기",
|
||||
"desc": "bounding box를 브러시 색으로 채웁니다"
|
||||
},
|
||||
"closePanels": {
|
||||
"desc": "열린 panels 닫기",
|
||||
"title": "panels 닫기"
|
||||
},
|
||||
"downloadImage": {
|
||||
"desc": "현재 캔버스 다운로드",
|
||||
"title": "이미지 다운로드"
|
||||
},
|
||||
"setParameters": {
|
||||
"title": "매개 변수 설정",
|
||||
"desc": "현재 이미지의 모든 매개 변수 사용"
|
||||
},
|
||||
"maximizeWorkSpace": {
|
||||
"desc": "패널을 닫고 작업 면적을 극대화",
|
||||
"title": "작업 공간 극대화"
|
||||
},
|
||||
"galleryHotkeys": "Gallery Hotkeys",
|
||||
"cancel": {
|
||||
"desc": "이미지 생성 취소",
|
||||
"title": "취소"
|
||||
},
|
||||
"saveToGallery": {
|
||||
"title": "갤러리에 저장",
|
||||
"desc": "현재 캔버스를 갤러리에 저장"
|
||||
},
|
||||
"eraseBoundingBox": {
|
||||
"desc": "bounding box 영역을 지웁니다",
|
||||
"title": "Bounding Box 지우기"
|
||||
},
|
||||
"nextImage": {
|
||||
"title": "다음 이미지",
|
||||
"desc": "갤러리에 다음 이미지 표시"
|
||||
},
|
||||
"colorPicker": {
|
||||
"desc": "canvas color picker 선택",
|
||||
"title": "Color Picker 선택"
|
||||
},
|
||||
"invoke": {
|
||||
"desc": "이미지 생성",
|
||||
"title": "불러오기"
|
||||
},
|
||||
"sendToImageToImage": {
|
||||
"desc": "현재 이미지를 이미지로 보내기"
|
||||
},
|
||||
"toggleLayer": {
|
||||
"desc": "mask/base layer 선택 전환",
|
||||
"title": "Layer 전환"
|
||||
},
|
||||
"increaseBrushSize": {
|
||||
"title": "브러시 크기 증가",
|
||||
"desc": "캔버스 브러시/지우개 크기 증가"
|
||||
},
|
||||
"appHotkeys": "App Hotkeys",
|
||||
"deleteImage": {
|
||||
"title": "이미지 삭제",
|
||||
"desc": "현재 이미지 삭제"
|
||||
},
|
||||
"moveTool": {
|
||||
"desc": "캔버스 탐색 허용",
|
||||
"title": "툴 옮기기"
|
||||
},
|
||||
"clearMask": {
|
||||
"desc": "전체 mask 제거",
|
||||
"title": "Mask 제거"
|
||||
},
|
||||
"increaseGalleryThumbSize": {
|
||||
"title": "갤러리 이미지 크기 증가",
|
||||
"desc": "갤러리 미리 보기 크기를 늘립니다"
|
||||
},
|
||||
"increaseBrushOpacity": {
|
||||
"desc": "캔버스 브러시의 불투명도를 높입니다",
|
||||
"title": "브러시 불투명도 증가"
|
||||
},
|
||||
"focusPrompt": {
|
||||
"desc": "프롬프트 입력 영역에 초점을 맞춥니다",
|
||||
"title": "프롬프트에 초점 맞추기"
|
||||
},
|
||||
"decreaseBrushOpacity": {
|
||||
"desc": "캔버스 브러시의 불투명도를 줄입니다",
|
||||
"title": "브러시 불투명도 감소"
|
||||
},
|
||||
"nextStagingImage": {
|
||||
"desc": "다음 스테이징 영역 이미지",
|
||||
"title": "다음 스테이징 이미지"
|
||||
},
|
||||
"redoStroke": {
|
||||
"title": "Stroke 다시 실행",
|
||||
"desc": "brush stroke 다시 실행"
|
||||
},
|
||||
"nodesHotkeys": "Nodes Hotkeys",
|
||||
"addNodes": {
|
||||
"desc": "노드 추가 메뉴 열기",
|
||||
"title": "노드 추가"
|
||||
},
|
||||
"toggleViewer": {
|
||||
"desc": "이미지 뷰어 열기 및 닫기",
|
||||
"title": "Viewer 전환"
|
||||
},
|
||||
"undoStroke": {
|
||||
"title": "Stroke 실행 취소",
|
||||
"desc": "brush stroke 실행 취소"
|
||||
},
|
||||
"changeTabs": {
|
||||
"desc": "다른 workspace으로 전환",
|
||||
"title": "탭 바꾸기"
|
||||
},
|
||||
"mergeVisible": {
|
||||
"desc": "캔버스의 보이는 모든 레이어 병합"
|
||||
}
|
||||
},
|
||||
"nodes": {
|
||||
"inputField": "입력 필드",
|
||||
"controlFieldDescription": "노드 간에 전달된 Control 정보입니다.",
|
||||
"latentsFieldDescription": "노드 사이에 Latents를 전달할 수 있습니다.",
|
||||
"denoiseMaskFieldDescription": "노드 간에 Denoise Mask가 전달될 수 있음",
|
||||
"floatCollectionDescription": "실수 컬렉션.",
|
||||
"missingTemplate": "잘못된 노드: {{type}} 유형의 {{node}} 템플릿 누락(설치되지 않으셨나요?)",
|
||||
"outputSchemaNotFound": "Output schema가 발견되지 않음",
|
||||
"ipAdapterPolymorphicDescription": "IP-Adapters 컬렉션.",
|
||||
"latentsPolymorphicDescription": "노드 사이에 Latents를 전달할 수 있습니다.",
|
||||
"colorFieldDescription": "RGBA 색.",
|
||||
"mainModelField": "모델",
|
||||
"ipAdapterCollection": "IP-Adapters 컬렉션",
|
||||
"conditioningCollection": "Conditioning 컬렉션",
|
||||
"maybeIncompatible": "설치된 것과 호환되지 않을 수 있음",
|
||||
"ipAdapterPolymorphic": "IP-Adapter 다형성",
|
||||
"noNodeSelected": "선택한 노드 없음",
|
||||
"addNode": "노드 추가",
|
||||
"hideGraphNodes": "그래프 오버레이 숨기기",
|
||||
"enum": "Enum",
|
||||
"loadWorkflow": "Workflow 불러오기",
|
||||
"integerPolymorphicDescription": "정수 컬렉션.",
|
||||
"noOutputRecorded": "기록된 출력 없음",
|
||||
"conditioningCollectionDescription": "노드 간에 Conditioning을 전달할 수 있습니다.",
|
||||
"colorPolymorphic": "색상 다형성",
|
||||
"colorCodeEdgesHelp": "연결된 필드에 따른 색상 코드 선",
|
||||
"collectionDescription": "해야 할 일",
|
||||
"hideLegendNodes": "필드 유형 범례 숨기기",
|
||||
"addLinearView": "Linear View에 추가",
|
||||
"float": "실수",
|
||||
"targetNodeFieldDoesNotExist": "잘못된 모서리: 대상/입력 필드 {{node}}. {{field}}이(가) 없습니다",
|
||||
"animatedEdges": "애니메이션 모서리",
|
||||
"conditioningPolymorphic": "Conditioning 다형성",
|
||||
"integer": "정수",
|
||||
"colorField": "색",
|
||||
"boardField": "Board",
|
||||
"nodeTemplate": "노드 템플릿",
|
||||
"latentsCollection": "Latents 컬렉션",
|
||||
"nodeOpacity": "노드 불투명도",
|
||||
"sourceNodeDoesNotExist": "잘못된 모서리: 소스/출력 노드 {{node}}이(가) 없습니다",
|
||||
"pickOne": "하나 고르기",
|
||||
"collectionItemDescription": "해야 할 일",
|
||||
"integerDescription": "정수는 소수점이 없는 숫자입니다.",
|
||||
"outputField": "출력 필드",
|
||||
"conditioningPolymorphicDescription": "노드 간에 Conditioning을 전달할 수 있습니다.",
|
||||
"noFieldsLinearview": "Linear View에 추가된 필드 없음",
|
||||
"imagePolymorphic": "이미지 다형성",
|
||||
"nodeSearch": "노드 검색",
|
||||
"imagePolymorphicDescription": "이미지 컬렉션.",
|
||||
"floatPolymorphic": "실수 다형성",
|
||||
"outputFieldInInput": "입력 중 출력필드",
|
||||
"doesNotExist": "존재하지 않음",
|
||||
"ipAdapterCollectionDescription": "IP-Adapters 컬렉션.",
|
||||
"controlCollection": "Control 컬렉션",
|
||||
"inputMayOnlyHaveOneConnection": "입력에 하나의 연결만 있을 수 있습니다",
|
||||
"notes": "메모",
|
||||
"nodeOutputs": "노드 결과물",
|
||||
"currentImageDescription": "Node Editor에 현재 이미지를 표시합니다",
|
||||
"downloadWorkflow": "Workflow JSON 다운로드",
|
||||
"ipAdapter": "IP-Adapter",
|
||||
"integerCollection": "정수 컬렉션",
|
||||
"collectionItem": "컬렉션 아이템",
|
||||
"noConnectionInProgress": "진행중인 연결이 없습니다",
|
||||
"controlCollectionDescription": "노드 간에 전달된 Control 정보입니다.",
|
||||
"noConnectionData": "연결 데이터 없음",
|
||||
"outputFields": "출력 필드",
|
||||
"fieldTypesMustMatch": "필드 유형은 일치해야 합니다",
|
||||
"edge": "Edge",
|
||||
"inputNode": "입력 노드",
|
||||
"enumDescription": "Enums은 여러 옵션 중 하나일 수 있는 값입니다.",
|
||||
"sourceNodeFieldDoesNotExist": "잘못된 모서리: 소스/출력 필드 {{node}}. {{field}}이(가) 없습니다",
|
||||
"loRAModelFieldDescription": "해야 할 일",
|
||||
"imageField": "이미지",
|
||||
"animatedEdgesHelp": "선택한 노드에 연결된 선택한 가장자리 및 가장자리를 애니메이션화합니다",
|
||||
"cannotDuplicateConnection": "중복 연결을 만들 수 없습니다",
|
||||
"booleanPolymorphic": "Boolean 다형성",
|
||||
"noWorkflow": "Workflow 없음",
|
||||
"colorCollectionDescription": "해야 할 일",
|
||||
"integerCollectionDescription": "정수 컬렉션.",
|
||||
"colorPolymorphicDescription": "색의 컬렉션.",
|
||||
"denoiseMaskField": "Denoise Mask",
|
||||
"missingCanvaInitImage": "캔버스 init 이미지 누락",
|
||||
"conditioningFieldDescription": "노드 간에 Conditioning을 전달할 수 있습니다.",
|
||||
"clipFieldDescription": "Tokenizer 및 text_encoder 서브모델.",
|
||||
"fullyContainNodesHelp": "선택하려면 노드가 선택 상자 안에 완전히 있어야 합니다",
|
||||
"noImageFoundState": "상태에서 초기 이미지를 찾을 수 없습니다",
|
||||
"clipField": "Clip",
|
||||
"nodePack": "Node pack",
|
||||
"nodeType": "노드 유형",
|
||||
"noMatchingNodes": "일치하는 노드 없음",
|
||||
"fullyContainNodes": "선택할 노드 전체 포함",
|
||||
"integerPolymorphic": "정수 다형성",
|
||||
"executionStateInProgress": "진행중",
|
||||
"noFieldType": "필드 유형 없음",
|
||||
"colorCollection": "색의 컬렉션.",
|
||||
"executionStateError": "에러",
|
||||
"noOutputSchemaName": "ref 개체에 output schema 이름이 없습니다",
|
||||
"ipAdapterModel": "IP-Adapter 모델",
|
||||
"latentsPolymorphic": "Latents 다형성",
|
||||
"ipAdapterDescription": "이미지 프롬프트 어댑터(IP-Adapter).",
|
||||
"boolean": "Booleans",
|
||||
"missingCanvaInitMaskImages": "캔버스 init 및 mask 이미지 누락",
|
||||
"problemReadingMetadata": "이미지에서 metadata를 읽는 중 문제가 발생했습니다",
|
||||
"hideMinimapnodes": "미니맵 숨기기",
|
||||
"oNNXModelField": "ONNX 모델",
|
||||
"executionStateCompleted": "완료된",
|
||||
"node": "노드",
|
||||
"currentImage": "현재 이미지",
|
||||
"controlField": "Control",
|
||||
"booleanDescription": "Booleans은 참 또는 거짓입니다.",
|
||||
"collection": "컬렉션",
|
||||
"ipAdapterModelDescription": "IP-Adapter 모델 필드",
|
||||
"cannotConnectInputToInput": "입력을 입력에 연결할 수 없습니다",
|
||||
"invalidOutputSchema": "잘못된 output schema",
|
||||
"boardFieldDescription": "A gallery board",
|
||||
"floatDescription": "실수는 소수점이 있는 숫자입니다.",
|
||||
"floatPolymorphicDescription": "실수 컬렉션.",
|
||||
"conditioningField": "Conditioning",
|
||||
"collectionFieldType": "{{name}} 컬렉션",
|
||||
"floatCollection": "실수 컬렉션",
|
||||
"latentsField": "Latents",
|
||||
"cannotConnectOutputToOutput": "출력을 출력에 연결할 수 없습니다",
|
||||
"booleanCollection": "Boolean 컬렉션",
|
||||
"connectionWouldCreateCycle": "연결하면 주기가 생성됩니다",
|
||||
"cannotConnectToSelf": "자체에 연결할 수 없습니다",
|
||||
"notesDescription": "Workflow에 대한 메모 추가",
|
||||
"inputFields": "입력 필드",
|
||||
"colorCodeEdges": "색상-코드 선",
|
||||
"targetNodeDoesNotExist": "잘못된 모서리: 대상/입력 노드 {{node}}이(가) 없습니다",
|
||||
"imageCollectionDescription": "이미지 컬렉션.",
|
||||
"mismatchedVersion": "잘못된 노드: {{type}} 유형의 {{node}} 노드에 일치하지 않는 버전이 있습니다(업데이트 해보시겠습니까?)",
|
||||
"imageFieldDescription": "노드 간에 이미지를 전달할 수 있습니다.",
|
||||
"outputNode": "출력노드",
|
||||
"addNodeToolTip": "노드 추가(Shift+A, Space)",
|
||||
"collectionOrScalarFieldType": "{{name}} 컬렉션|Scalar",
|
||||
"nodeVersion": "노드 버전",
|
||||
"loadingNodes": "노드 로딩중...",
|
||||
"mainModelFieldDescription": "해야 할 일",
|
||||
"loRAModelField": "LoRA",
|
||||
"deletedInvalidEdge": "잘못된 모서리 {{source}} -> {{target}} 삭제",
|
||||
"latentsCollectionDescription": "노드 사이에 Latents를 전달할 수 있습니다.",
|
||||
"oNNXModelFieldDescription": "ONNX 모델 필드.",
|
||||
"imageCollection": "이미지 컬렉션"
|
||||
},
|
||||
"queue": {
|
||||
"status": "상태",
|
||||
"pruneSucceeded": "Queue로부터 {{item_count}} 완성된 항목 잘라내기",
|
||||
"cancelTooltip": "현재 항목 취소",
|
||||
"queueEmpty": "비어있는 Queue",
|
||||
"pauseSucceeded": "중지된 프로세서",
|
||||
"in_progress": "진행 중",
|
||||
"queueFront": "Front of Queue에 추가",
|
||||
"notReady": "Queue를 생성할 수 없음",
|
||||
"batchFailedToQueue": "Queue Batch에 실패",
|
||||
"completed": "완성된",
|
||||
"queueBack": "Queue에 추가",
|
||||
"batchValues": "Batch 값들",
|
||||
"cancelFailed": "항목 취소 중 발생한 문제",
|
||||
"queueCountPrediction": "Queue에 {{predicted}} 추가",
|
||||
"batchQueued": "Batch Queued",
|
||||
"pauseFailed": "프로세서 중지 중 발생한 문제",
|
||||
"clearFailed": "Queue 제거 중 발생한 문제",
|
||||
"queuedCount": "{{pending}} Pending",
|
||||
"front": "front",
|
||||
"clearSucceeded": "제거된 Queue",
|
||||
"pause": "중지",
|
||||
"pruneTooltip": "{{item_count}} 완성된 항목 잘라내기",
|
||||
"cancelSucceeded": "취소된 항목",
|
||||
"batchQueuedDesc_other": "queue의 {{direction}}에 추가된 {{count}}세션",
|
||||
"queue": "Queue",
|
||||
"batch": "Batch",
|
||||
"clearQueueAlertDialog": "Queue를 지우면 처리 항목이 즉시 취소되고 Queue가 완전히 지워집니다.",
|
||||
"resumeFailed": "프로세서 재개 중 발생한 문제",
|
||||
"clear": "제거하다",
|
||||
"prune": "잘라내다",
|
||||
"total": "총 개수",
|
||||
"canceled": "취소된",
|
||||
"pruneFailed": "Queue 잘라내는 중 발생한 문제",
|
||||
"cancelBatchSucceeded": "취소된 Batch",
|
||||
"clearTooltip": "모든 항목을 취소하고 제거",
|
||||
"current": "최근",
|
||||
"pauseTooltip": "프로세서 중지",
|
||||
"failed": "실패한",
|
||||
"cancelItem": "항목 취소",
|
||||
"next": "다음",
|
||||
"cancelBatch": "Batch 취소",
|
||||
"back": "back",
|
||||
"batchFieldValues": "Batch 필드 값들",
|
||||
"cancel": "취소",
|
||||
"session": "세션",
|
||||
"time": "시간",
|
||||
"queueTotal": "{{total}} Total",
|
||||
"resumeSucceeded": "재개된 프로세서",
|
||||
"enqueueing": "Queueing Batch",
|
||||
"resumeTooltip": "프로세서 재개",
|
||||
"resume": "재개",
|
||||
"cancelBatchFailed": "Batch 취소 중 발생한 문제",
|
||||
"clearQueueAlertDialog2": "Queue를 지우시겠습니까?",
|
||||
"item": "항목",
|
||||
"graphFailedToQueue": "queue graph에 실패"
|
||||
},
|
||||
"metadata": {
|
||||
"positivePrompt": "긍정적 프롬프트",
|
||||
"negativePrompt": "부정적인 프롬프트",
|
||||
"generationMode": "Generation Mode",
|
||||
"Threshold": "Noise Threshold",
|
||||
"metadata": "Metadata",
|
||||
"seed": "시드",
|
||||
"imageDetails": "이미지 세부 정보",
|
||||
"perlin": "Perlin Noise",
|
||||
"model": "모델",
|
||||
"noImageDetails": "이미지 세부 정보를 찾을 수 없습니다",
|
||||
"hiresFix": "고해상도 최적화",
|
||||
"cfgScale": "CFG scale",
|
||||
"initImage": "초기이미지",
|
||||
"recallParameters": "매개변수 호출",
|
||||
"height": "Height",
|
||||
"variations": "Seed-weight 쌍",
|
||||
"noMetaData": "metadata를 찾을 수 없습니다",
|
||||
"cfgRescaleMultiplier": "$t(parameters.cfgRescaleMultiplier)",
|
||||
"width": "너비",
|
||||
"vae": "VAE",
|
||||
"createdBy": "~에 의해 생성된",
|
||||
"workflow": "작업의 흐름",
|
||||
"steps": "단계",
|
||||
"scheduler": "스케줄러",
|
||||
"noRecallParameters": "호출할 매개 변수가 없습니다"
|
||||
},
|
||||
"invocationCache": {
|
||||
"useCache": "캐시 사용",
|
||||
"disable": "이용 불가능한",
|
||||
"misses": "캐시 미스",
|
||||
"enableFailed": "Invocation 캐시를 사용하도록 설정하는 중 발생한 문제",
|
||||
"invocationCache": "Invocation 캐시",
|
||||
"clearSucceeded": "제거된 Invocation 캐시",
|
||||
"enableSucceeded": "이용 가능한 Invocation 캐시",
|
||||
"clearFailed": "Invocation 캐시 제거 중 발생한 문제",
|
||||
"hits": "캐시 적중",
|
||||
"disableSucceeded": "이용 불가능한 Invocation 캐시",
|
||||
"disableFailed": "Invocation 캐시를 이용하지 못하게 설정 중 발생한 문제",
|
||||
"enable": "이용 가능한",
|
||||
"clear": "제거",
|
||||
"maxCacheSize": "최대 캐시 크기",
|
||||
"cacheSize": "캐시 크기"
|
||||
},
|
||||
"embedding": {
|
||||
"noEmbeddingsLoaded": "불러온 Embeddings이 없음",
|
||||
"noMatchingEmbedding": "일치하는 Embeddings이 없음",
|
||||
"addEmbedding": "Embedding 추가",
|
||||
"incompatibleModel": "호환되지 않는 기본 모델:"
|
||||
},
|
||||
"hrf": {
|
||||
"enableHrf": "이용 가능한 고해상도 고정",
|
||||
"upscaleMethod": "업스케일 방법",
|
||||
"enableHrfTooltip": "낮은 초기 해상도로 생성하고 기본 해상도로 업스케일한 다음 Image-to-Image를 실행합니다.",
|
||||
"metadata": {
|
||||
"strength": "고해상도 고정 강도",
|
||||
"enabled": "고해상도 고정 사용",
|
||||
"method": "고해상도 고정 방법"
|
||||
},
|
||||
"hrf": "고해상도 고정",
|
||||
"hrfStrength": "고해상도 고정 강도"
|
||||
},
|
||||
"models": {
|
||||
"noLoRAsLoaded": "로드된 LoRA 없음",
|
||||
"noMatchingModels": "일치하는 모델 없음",
|
||||
"esrganModel": "ESRGAN 모델",
|
||||
"loading": "로딩중",
|
||||
"noMatchingLoRAs": "일치하는 LoRA 없음",
|
||||
"noLoRAsAvailable": "사용 가능한 LoRA 없음",
|
||||
"noModelsAvailable": "사용 가능한 모델이 없음",
|
||||
"addLora": "LoRA 추가",
|
||||
"selectModel": "모델 선택",
|
||||
"noRefinerModelsInstalled": "SDXL Refiner 모델이 설치되지 않음",
|
||||
"noLoRAsInstalled": "설치된 LoRA 없음",
|
||||
"selectLoRA": "LoRA 선택"
|
||||
},
|
||||
"boards": {
|
||||
"autoAddBoard": "자동 추가 Board",
|
||||
"topMessage": "이 보드에는 다음 기능에 사용되는 이미지가 포함되어 있습니다:",
|
||||
"move": "이동",
|
||||
"menuItemAutoAdd": "해당 Board에 자동 추가",
|
||||
"myBoard": "나의 Board",
|
||||
"searchBoard": "Board 찾는 중...",
|
||||
"deleteBoardOnly": "Board만 삭제",
|
||||
"noMatching": "일치하는 Board들이 없음",
|
||||
"movingImagesToBoard_other": "{{count}}이미지를 Board로 이동시키기",
|
||||
"selectBoard": "Board 선택",
|
||||
"cancel": "취소",
|
||||
"addBoard": "Board 추가",
|
||||
"bottomMessage": "이 보드와 이미지를 삭제하면 현재 사용 중인 모든 기능이 재설정됩니다.",
|
||||
"uncategorized": "미분류",
|
||||
"downloadBoard": "Board 다운로드",
|
||||
"changeBoard": "Board 바꾸기",
|
||||
"loading": "불러오는 중...",
|
||||
"clearSearch": "검색 지우기",
|
||||
"deleteBoard": "Board 삭제",
|
||||
"deleteBoardAndImages": "Board와 이미지 삭제",
|
||||
"deletedBoardsCannotbeRestored": "삭제된 Board는 복원할 수 없습니다"
|
||||
}
|
||||
}
|
||||
|
@ -120,7 +120,8 @@
|
||||
"orderBy": "排序方式:",
|
||||
"nextPage": "下一页",
|
||||
"saveAs": "保存为",
|
||||
"unsaved": "未保存"
|
||||
"unsaved": "未保存",
|
||||
"ai": "ai"
|
||||
},
|
||||
"gallery": {
|
||||
"generations": "生成的图像",
|
||||
@ -1122,7 +1123,8 @@
|
||||
"betaDesc": "此调用尚处于测试阶段。在稳定之前,它可能会在项目更新期间发生破坏性更改。本项目计划长期支持这种调用。",
|
||||
"newWorkflow": "新建工作流",
|
||||
"newWorkflowDesc": "是否创建一个新的工作流?",
|
||||
"newWorkflowDesc2": "当前工作流有未保存的更改。"
|
||||
"newWorkflowDesc2": "当前工作流有未保存的更改。",
|
||||
"unsupportedAnyOfLength": "联合(union)数据类型数目过多 ({{count}})"
|
||||
},
|
||||
"controlnet": {
|
||||
"resize": "直接缩放",
|
||||
|
@ -172,6 +172,8 @@ nav:
- Adding Tests: 'contributing/TESTS.md'
- Documentation: 'contributing/contribution_guides/documentation.md'
- Nodes: 'contributing/INVOCATIONS.md'
- Model Manager: 'contributing/MODEL_MANAGER.md'
- Download Queue: 'contributing/DOWNLOAD_QUEUE.md'
- Translation: 'contributing/contribution_guides/translation.md'
- Tutorials: 'contributing/contribution_guides/tutorials.md'
- Changelog: 'CHANGELOG.md'

@ -105,6 +105,7 @@ dependencies = [
"pytest>6.0.0",
"pytest-cov",
"pytest-datadir",
"requests_testadapter",
]
"xformers" = [
"xformers==0.0.23; sys_platform!='darwin'",
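The two new test dependencies back the download-queue tests added later in this commit: `pytest-datadir` provides per-test data directories, and `requests_testadapter` lets a `requests.Session` answer requests with canned responses instead of touching the network. A minimal sketch of the latter; the URL and payload here are arbitrary:

```python
import requests
from requests_testadapter import TestAdapter

sess = requests.Session()
# Any request under this prefix is served locally from the canned bytes below.
sess.mount(
    "http://example.invalid/models/1",
    TestAdapter(b"fake model bytes", headers={"Content-Disposition": 'filename="mock1.safetensors"'}),
)

resp = sess.get("http://example.invalid/models/1")
print(resp.status_code, len(resp.content))  # 200 16
```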
@ -26,7 +26,6 @@ from invokeai.app.services.shared.graph import (
|
||||
Graph,
|
||||
GraphExecutionState,
|
||||
IterateInvocation,
|
||||
LibraryGraph,
|
||||
)
|
||||
from invokeai.backend.util.logging import InvokeAILogger
|
||||
from tests.fixtures.sqlite_database import create_mock_sqlite_database
|
||||
@ -61,7 +60,6 @@ def mock_services() -> InvocationServices:
|
||||
configuration=configuration,
|
||||
events=TestEventService(),
|
||||
graph_execution_manager=graph_execution_manager,
|
||||
graph_library=SqliteItemStorage[LibraryGraph](db=db, table_name="graphs"),
|
||||
image_files=None, # type: ignore
|
||||
image_records=None, # type: ignore
|
||||
images=None, # type: ignore
|
||||
@ -70,6 +68,7 @@ def mock_services() -> InvocationServices:
|
||||
logger=logging, # type: ignore
|
||||
model_manager=None, # type: ignore
|
||||
model_records=None, # type: ignore
|
||||
download_queue=None, # type: ignore
|
||||
model_install=None, # type: ignore
|
||||
names=None, # type: ignore
|
||||
performance_statistics=InvocationStatsService(),
|
||||
|
@ -24,7 +24,7 @@ from invokeai.app.services.invocation_stats.invocation_stats_default import Invo
|
||||
from invokeai.app.services.invoker import Invoker
|
||||
from invokeai.app.services.item_storage.item_storage_sqlite import SqliteItemStorage
|
||||
from invokeai.app.services.session_queue.session_queue_common import DEFAULT_QUEUE_ID
|
||||
from invokeai.app.services.shared.graph import Graph, GraphExecutionState, GraphInvocation, LibraryGraph
|
||||
from invokeai.app.services.shared.graph import Graph, GraphExecutionState, GraphInvocation
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -66,7 +66,6 @@ def mock_services() -> InvocationServices:
|
||||
configuration=configuration,
|
||||
events=TestEventService(),
|
||||
graph_execution_manager=graph_execution_manager,
|
||||
graph_library=SqliteItemStorage[LibraryGraph](db=db, table_name="graphs"),
|
||||
image_files=None, # type: ignore
|
||||
image_records=None, # type: ignore
|
||||
images=None, # type: ignore
|
||||
@ -75,6 +74,7 @@ def mock_services() -> InvocationServices:
|
||||
logger=logging, # type: ignore
|
||||
model_manager=None, # type: ignore
|
||||
model_records=None, # type: ignore
|
||||
download_queue=None, # type: ignore
|
||||
model_install=None, # type: ignore
|
||||
names=None, # type: ignore
|
||||
performance_statistics=InvocationStatsService(),
|
||||
|
223
tests/app/services/download/test_download_queue.py
Normal file
223
tests/app/services/download/test_download_queue.py
Normal file
@ -0,0 +1,223 @@
|
||||
"""Test the queued download facility"""
|
||||
import re
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
from pydantic import BaseModel
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
from requests.sessions import Session
|
||||
from requests_testadapter import TestAdapter
|
||||
|
||||
from invokeai.app.services.download import DownloadJob, DownloadJobStatus, DownloadQueueService
|
||||
from invokeai.app.services.events.events_base import EventServiceBase
|
||||
|
||||
# Prevent pytest deprecation warnings
|
||||
TestAdapter.__test__ = False
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def session() -> requests.sessions.Session:
|
||||
sess = requests.Session()
|
||||
for i in ["12345", "9999", "54321"]:
|
||||
content = (
|
||||
b"I am a safetensors file " + bytearray(i, "utf-8") + bytearray(32_000)
|
||||
) # for pause tests, must make content large
|
||||
sess.mount(
|
||||
f"http://www.civitai.com/models/{i}",
|
||||
TestAdapter(
|
||||
content,
|
||||
headers={
|
||||
"Content-Length": len(content),
|
||||
"Content-Disposition": f'filename="mock{i}.safetensors"',
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
# here are some malformed URLs to test
|
||||
# missing the content length
|
||||
sess.mount(
|
||||
"http://www.civitai.com/models/missing",
|
||||
TestAdapter(
|
||||
b"Missing content length",
|
||||
headers={
|
||||
"Content-Disposition": 'filename="missing.txt"',
|
||||
},
|
||||
),
|
||||
)
|
||||
# not found test
|
||||
sess.mount("http://www.civitai.com/models/broken", TestAdapter(b"Not found", status=404))
|
||||
|
||||
return sess
|
||||
|
||||
|
||||
class DummyEvent(BaseModel):
|
||||
"""Dummy Event to use with Dummy Event service."""
|
||||
|
||||
event_name: str
|
||||
payload: Dict[str, Any]
|
||||
|
||||
|
||||
# A dummy event service for testing event issuing
|
||||
class DummyEventService(EventServiceBase):
|
||||
"""Dummy event service for testing."""
|
||||
|
||||
events: List[DummyEvent]
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.events = []
|
||||
|
||||
def dispatch(self, event_name: str, payload: Any) -> None:
|
||||
"""Dispatch an event by appending it to self.events."""
|
||||
self.events.append(DummyEvent(event_name=payload["event"], payload=payload["data"]))
|
||||
|
||||
|
||||
def test_basic_queue_download(tmp_path: Path, session: Session) -> None:
|
||||
events = set()
|
||||
|
||||
def event_handler(job: DownloadJob) -> None:
|
||||
events.add(job.status)
|
||||
|
||||
queue = DownloadQueueService(
|
||||
requests_session=session,
|
||||
)
|
||||
queue.start()
|
||||
job = queue.download(
|
||||
source=AnyHttpUrl("http://www.civitai.com/models/12345"),
|
||||
dest=tmp_path,
|
||||
on_start=event_handler,
|
||||
on_progress=event_handler,
|
||||
on_complete=event_handler,
|
||||
on_error=event_handler,
|
||||
)
|
||||
assert isinstance(job, DownloadJob), "expected the job to be of type DownloadJobBase"
|
||||
assert isinstance(job.id, int), "expected the job id to be numeric"
|
||||
queue.join()
|
||||
|
||||
assert job.status == DownloadJobStatus("completed"), "expected job status to be completed"
|
||||
assert Path(tmp_path, "mock12345.safetensors").exists(), f"expected {tmp_path}/mock12345.safetensors to exist"
|
||||
|
||||
assert events == {DownloadJobStatus.RUNNING, DownloadJobStatus.COMPLETED}
|
||||
queue.stop()
|
||||
|
||||
|
||||
def test_errors(tmp_path: Path, session: Session) -> None:
|
||||
queue = DownloadQueueService(
|
||||
requests_session=session,
|
||||
)
|
||||
queue.start()
|
||||
|
||||
for bad_url in ["http://www.civitai.com/models/broken", "http://www.civitai.com/models/missing"]:
|
||||
queue.download(AnyHttpUrl(bad_url), dest=tmp_path)
|
||||
|
||||
queue.join()
|
||||
jobs = queue.list_jobs()
|
||||
print(jobs)
|
||||
assert len(jobs) == 2
|
||||
jobs_dict = {str(x.source): x for x in jobs}
|
||||
assert jobs_dict["http://www.civitai.com/models/broken"].status == DownloadJobStatus.ERROR
|
||||
assert jobs_dict["http://www.civitai.com/models/broken"].error_type == "HTTPError(NOT FOUND)"
|
||||
assert jobs_dict["http://www.civitai.com/models/missing"].status == DownloadJobStatus.COMPLETED
|
||||
assert jobs_dict["http://www.civitai.com/models/missing"].total_bytes == 0
|
||||
queue.stop()
|
||||
|
||||
|
||||
def test_event_bus(tmp_path: Path, session: Session) -> None:
|
||||
event_bus = DummyEventService()
|
||||
|
||||
queue = DownloadQueueService(requests_session=session, event_bus=event_bus)
|
||||
queue.start()
|
||||
queue.download(
|
||||
source=AnyHttpUrl("http://www.civitai.com/models/12345"),
|
||||
dest=tmp_path,
|
||||
)
|
||||
queue.join()
|
||||
events = event_bus.events
|
||||
assert len(events) == 3
|
||||
assert events[0].payload["timestamp"] <= events[1].payload["timestamp"]
|
||||
assert events[1].payload["timestamp"] <= events[2].payload["timestamp"]
|
||||
assert events[0].event_name == "download_started"
|
||||
assert events[1].event_name == "download_progress"
|
||||
assert events[1].payload["total_bytes"] > 0
|
||||
assert events[1].payload["current_bytes"] <= events[1].payload["total_bytes"]
|
||||
assert events[2].event_name == "download_complete"
|
||||
assert events[2].payload["total_bytes"] == 32029
|
||||
|
||||
# test a failure
|
||||
event_bus.events = [] # reset our accumulator
|
||||
queue.download(source=AnyHttpUrl("http://www.civitai.com/models/broken"), dest=tmp_path)
|
||||
queue.join()
|
||||
events = event_bus.events
|
||||
print("\n".join([x.model_dump_json() for x in events]))
|
||||
assert len(events) == 1
|
||||
assert events[0].event_name == "download_error"
|
||||
assert events[0].payload["error_type"] == "HTTPError(NOT FOUND)"
|
||||
assert events[0].payload["error"] is not None
|
||||
assert re.search(r"requests.exceptions.HTTPError: NOT FOUND", events[0].payload["error"])
|
||||
queue.stop()
|
||||
|
||||
|
||||
def test_broken_callbacks(tmp_path: Path, session: requests.sessions.Session, capsys) -> None:
|
||||
queue = DownloadQueueService(
|
||||
requests_session=session,
|
||||
)
|
||||
queue.start()
|
||||
|
||||
callback_ran = False
|
||||
|
||||
def broken_callback(job: DownloadJob) -> None:
|
||||
nonlocal callback_ran
|
||||
callback_ran = True
|
||||
print(1 / 0) # deliberate error here
|
||||
|
||||
job = queue.download(
|
||||
source=AnyHttpUrl("http://www.civitai.com/models/12345"),
|
||||
dest=tmp_path,
|
||||
on_progress=broken_callback,
|
||||
)
|
||||
|
||||
queue.join()
|
||||
assert job.status == DownloadJobStatus.COMPLETED # should complete even though the callback is borked
|
||||
assert Path(tmp_path, "mock12345.safetensors").exists()
|
||||
assert callback_ran
|
||||
# LS: The pytest capsys fixture does not seem to be working. I can see the
|
||||
# correct stderr message in the pytest log, but it is not appearing in
|
||||
# capsys.readouterr().
|
||||
# captured = capsys.readouterr()
|
||||
# assert re.search("division by zero", captured.err)
|
||||
queue.stop()
|
||||
|
||||
|
||||
def test_cancel(tmp_path: Path, session: requests.sessions.Session) -> None:
|
||||
event_bus = DummyEventService()
|
||||
|
||||
queue = DownloadQueueService(requests_session=session, event_bus=event_bus)
|
||||
queue.start()
|
||||
|
||||
cancelled = False
|
||||
|
||||
def slow_callback(job: DownloadJob) -> None:
|
||||
time.sleep(2)
|
||||
|
||||
def cancelled_callback(job: DownloadJob) -> None:
|
||||
nonlocal cancelled
|
||||
cancelled = True
|
||||
|
||||
job = queue.download(
|
||||
source=AnyHttpUrl("http://www.civitai.com/models/12345"),
|
||||
dest=tmp_path,
|
||||
on_start=slow_callback,
|
||||
on_cancelled=cancelled_callback,
|
||||
)
|
||||
queue.cancel_job(job)
|
||||
queue.join()
|
||||
|
||||
assert job.status == DownloadJobStatus.CANCELLED
|
||||
assert cancelled
|
||||
events = event_bus.events
|
||||
assert events[-1].event_name == "download_cancelled"
|
||||
assert events[-1].payload["source"] == "http://www.civitai.com/models/12345"
|
||||
queue.stop()
|
@ -48,11 +48,13 @@ def store(

@pytest.fixture
def installer(app_config: InvokeAIAppConfig, store: ModelRecordServiceBase) -> ModelInstallServiceBase:
return ModelInstallService(
installer = ModelInstallService(
app_config=app_config,
record_store=store,
event_bus=DummyEventService(),
)
installer.start()
return installer


class DummyEvent(BaseModel):