Debloat-docker (#1612)
* debloat Dockerfile
  - fewer options, but more user friendly
  - better Entrypoint to simulate CLI usage
  - without a command the container still starts the web host
* debloat build.sh
* better syntax in run.sh
* update Docker docs
  - fix description of VOLUMENAME
  - update run script example to reflect the new entrypoint
commit 5897e511f1
parent f43b767b87
docker-build/Dockerfile
@@ -18,24 +18,17 @@ RUN apt-get update \
   && apt-get clean \
   && rm -rf /var/lib/apt/lists/*
 
-ARG PIP_REQUIREMENTS=requirements-lin-cuda.txt
-ARG PROJECT_NAME=invokeai
-ARG INVOKEAI_ROOT=/data
-ENV INVOKEAI_ROOT=${INVOKEAI_ROOT}
-
 # set workdir and copy sources
-WORKDIR /${PROJECT_NAME}
-COPY . .
+WORKDIR /invokeai
+ARG PIP_REQUIREMENTS=requirements-lin-cuda.txt
+COPY . ./environments-and-requirements/${PIP_REQUIREMENTS} ./
 
 # install requirements and link outputs folder
-RUN cp \
-  ./environments-and-requirements/${PIP_REQUIREMENTS} \
-  ${PIP_REQUIREMENTS} \
-  && pip install \
+RUN pip install \
   --no-cache-dir \
-  -r ${PIP_REQUIREMENTS} \
-  && ln -sf /data/outputs /${PROJECT_NAME}/outputs
+  -r ${PIP_REQUIREMENTS}
 
-# set Entrypoint and default CMD
-ENTRYPOINT [ "python3" ]
-CMD [ "scripts/invoke.py", "--web", "--host", "0.0.0.0" ]
+# set Environment, Entrypoint and default CMD
+ENV INVOKEAI_ROOT /data
+ENTRYPOINT [ "python3", "scripts/invoke.py", "--outdir=/data/outputs" ]
+CMD [ "--web", "--host=0.0.0.0" ]
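The split between ENTRYPOINT and CMD is what makes the container behave like the CLI: the entrypoint always runs `python3 scripts/invoke.py --outdir=/data/outputs`, and CMD only supplies the default arguments `--web --host=0.0.0.0`, which anything passed on `docker run` replaces. A minimal sketch of both modes, assuming the documented default tag `${PROJECT_NAME}:${ARCH}`, i.e. `invokeai:x86_64`:

```bash
# no arguments: CMD applies, so the container starts the web host
docker run --rm --publish=9090:9090 invokeai:x86_64

# arguments replace CMD but are still appended to the entrypoint, so this runs
# "python3 scripts/invoke.py --outdir=/data/outputs" with the prompt and flags below
docker run --rm --interactive --tty invokeai:x86_64 "banana sushi" -S42 -s10
```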
docker-build/build.sh
@@ -1,10 +1,12 @@
 #!/usr/bin/env bash
 set -e
-# IMPORTANT: You need to have a token on huggingface.co to be able to download the checkpoint!!!
-# configure values by using env when executing build.sh
-# f.e. env ARCH=aarch64 GITHUB_INVOKE_AI=https://github.com/yourname/yourfork.git ./build.sh
 
-source ./docker-build/env.sh || echo "please run from repository root" || exit 1
+# IMPORTANT: You need to have a token on huggingface.co to be able to download the checkpoints!!!
+# configure values by using env when executing build.sh f.e. `env ARCH=aarch64 ./build.sh`
+
+source ./docker-build/env.sh \
+  || echo "please execute docker-build/build.sh from repository root" \
+  || exit 1
 
 pip_requirements=${PIP_REQUIREMENTS:-requirements-lin-cuda.txt}
 dockerfile=${INVOKE_DOCKERFILE:-docker-build/Dockerfile}
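The two assignments at the end use bash's `${VAR:-default}` expansion, so both the requirements file and the Dockerfile can be swapped per invocation through the environment, as the new `env ARCH=aarch64 ./build.sh` comment hints. A hedged sketch (the alternative requirements file name below is only a placeholder):

```bash
# ${VAR:-default}: use the environment value when it is set, otherwise the default
pip_requirements=${PIP_REQUIREMENTS:-requirements-lin-cuda.txt}

# one-off overrides without editing the script (placeholder file name):
env PIP_REQUIREMENTS=requirements-some-other-flavor.txt ./docker-build/build.sh
env INVOKE_DOCKERFILE=docker-build/Dockerfile ARCH=aarch64 ./docker-build/build.sh
```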
docker-build/build.sh (continued)
@@ -13,28 +15,14 @@ dockerfile=${INVOKE_DOCKERFILE:-docker-build/Dockerfile}
 echo "You are using these values:"
 echo -e "Dockerfile:\t\t ${dockerfile}"
 echo -e "requirements:\t\t ${pip_requirements}"
-echo -e "project_name:\t\t ${project_name}"
 echo -e "volumename:\t\t ${volumename}"
 echo -e "arch:\t\t\t ${arch}"
 echo -e "platform:\t\t ${platform}"
 echo -e "invokeai_tag:\t\t ${invokeai_tag}\n"
 
-_runAlpine() {
-  docker run \
-    --rm \
-    --interactive \
-    --tty \
-    --mount source="$volumename",target=/data \
-    --workdir /data \
-    alpine "$@"
-}
-
-_checkVolumeContent() {
-  _runAlpine ls -lhA /data/models
-}
-
 if [[ -n "$(docker volume ls -f name="${volumename}" -q)" ]]; then
   echo "Volume already exists"
+  echo
 else
   echo -n "createing docker volume "
   docker volume create "${volumename}"
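The removed `_runAlpine` and `_checkVolumeContent` helpers only existed to peek into the data volume; the volume handling itself is unchanged: `docker volume ls -q` with a name filter prints the volume name when it exists and nothing otherwise. A quick sketch of the same check and inspection done by hand, assuming the documented default volume name `invokeai_data`:

```bash
# does the data volume exist? (-q prints only matching names)
if [[ -n "$(docker volume ls -f name=invokeai_data -q)" ]]; then
  echo "Volume already exists"
else
  docker volume create invokeai_data
fi

# what the dropped _checkVolumeContent helper did, reproduced manually
docker run --rm -it --mount source=invokeai_data,target=/data alpine ls -lhA /data/models
```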
docker-build/build.sh (continued)
@@ -44,7 +32,6 @@ fi
 docker build \
   --platform="${platform}" \
   --tag="${invokeai_tag}" \
-  --build-arg="PROJECT_NAME=${project_name}" \
   --build-arg="PIP_REQUIREMENTS=${pip_requirements}" \
   --file="${dockerfile}" \
   .
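With `PROJECT_NAME` gone, the only remaining build argument is `PIP_REQUIREMENTS`, which feeds the Dockerfile's `ARG PIP_REQUIREMENTS` and ends up in `pip install -r`. A hand-rolled equivalent of the call above with the documented defaults filled in (the `--platform` value is an assumption, since the real value comes from `env.sh`):

```bash
docker build \
  --platform=linux/amd64 \
  --tag=invokeai:x86_64 \
  --build-arg="PIP_REQUIREMENTS=requirements-lin-cuda.txt" \
  --file=docker-build/Dockerfile \
  .
```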
docker-build/build.sh (continued)
@@ -57,5 +44,6 @@ docker run \
   --mount="source=$volumename,target=/data" \
   --mount="type=bind,source=$HOME/.huggingface,target=/root/.huggingface" \
   --env="HUGGINGFACE_TOKEN=${HUGGINGFACE_TOKEN}" \
+  --entrypoint="python3" \
   "${invokeai_tag}" \
   scripts/configure_invokeai.py --yes
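Because the image now bakes `python3 scripts/invoke.py --outdir=/data/outputs` into its entrypoint, the build script has to override it with `--entrypoint="python3"` so that `scripts/configure_invokeai.py` runs instead of the CLI. The same override works for any other one-off command; a small sketch (tag assumed to be the default `invokeai:x86_64`, and `/bin/bash` assumed to exist in the base image):

```bash
# open a shell inside the image instead of launching invoke.py
docker run --rm -it --entrypoint=/bin/bash invokeai:x86_64
```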
docker-build/run.sh
@@ -7,9 +7,9 @@ docker run \
   --interactive \
   --tty \
   --rm \
-  --platform "$platform" \
-  --name "$project_name" \
-  --hostname "$project_name" \
-  --mount source="$volumename",target=/data \
-  --publish 9090:9090 \
+  --platform="$platform" \
+  --name="$project_name" \
+  --hostname="$project_name" \
+  --mount="source=$volumename,target=/data" \
+  --publish=9090:9090 \
   "$invokeai_tag" ${1:+$@}
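The trailing `${1:+$@}` forwards whatever arguments run.sh receives straight into the container (and, via the new entrypoint, to invoke.py), and expands to nothing at all when the script is called without arguments, so the image's CMD takes over and the web host starts. A self-contained sketch of that expansion (the tag value is the documented default, assumed here):

```bash
invokeai_tag=invokeai:x86_64        # documented default ${PROJECT_NAME}:${ARCH}

set -- "banana sushi" -S42          # simulate: ./docker-build/run.sh "banana sushi" -S42
echo docker run "$invokeai_tag" ${1:+$@}
# -> docker run invokeai:x86_64 banana sushi -S42  (unquoted, so the prompt is word-split)

set --                              # simulate: ./docker-build/run.sh
echo docker run "$invokeai_tag" ${1:+$@}
# -> docker run invokeai:x86_64                    (no args, CMD starts the web host)
```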
Docker docs
@@ -78,7 +78,7 @@ Some Suggestions of variables you may want to change besides the Token:
 | -------------------- | ----------------------------- | -------------------------------------------------------------------------------------------- |
 | `HUGGINGFACE_TOKEN` | No default, but **required**! | This is the only **required** variable, without it you can't download the huggingface models |
 | `PROJECT_NAME` | `invokeai` | affects the project folder, tag- and volume name |
-| `VOLUMENAME` | `${PROJECT_NAME}_data` | affects the project folder, tag- and volume name |
+| `VOLUMENAME` | `${PROJECT_NAME}_data` | Name of the Docker Volume where model files will be stored |
 | `ARCH` | `x86_64` | can be changed to f.e. aarch64 if you are using a ARM based CPU |
 | `INVOKEAI_TAG` | `${PROJECT_NAME}:${ARCH}` | the Container Repository / Tag which will be used |
 | `PIP_REQUIREMENTS` | `requirements-lin-cuda.txt` | the requirements file to use (from `environments-and-requirements`) |
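`VOLUMENAME` is now described for what it actually is: the name of the Docker volume that holds the downloaded model files. Like the other variables in the table it is a plain environment override for the build script; a hedged example, assuming `env.sh` picks it up as the table implies (the custom name is only a placeholder):

```bash
# keep the model files in a differently named volume (placeholder name)
env VOLUMENAME=invokeai_models HUGGINGFACE_TOKEN="<your token>" ./docker-build/build.sh

# the volume can be inspected afterwards
docker volume inspect invokeai_models
```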
Docker docs (continued)
@@ -111,13 +111,13 @@ When used without arguments, the container will start the webserver and provide
 you the link to open it. But if you want to use some other parameters you can
 also do so.
 
-!!! example ""
+!!! example "run script example"
 
     ```bash
-    ./docker-build/run.sh scripts/invoke.py
+    ./docker-build/run.sh "banana sushi" -Ak_lms -S42 -s10
     ```
 
-    This would start the CLI instead of the default command that starts the webserver.
+    This would generate the legendary "banana sushi" with Seed 42, k_lms Sampler and 10 steps.
 
 Find out more about available CLI-Parameters at [features/CLI.md](../../features/CLI/#arguments)
 
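Since every argument to run.sh now lands behind `scripts/invoke.py`, the updated example drives the CLI directly; the webserver remains the default and can also be requested explicitly:

```bash
# no arguments: the image CMD starts the web host, published on port 9090
./docker-build/run.sh

# explicit equivalent of the default CMD
./docker-build/run.sh --web --host=0.0.0.0
```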