Merge branch 'main' into webui-model-conversion

commit 7410a60208
Author: blessedcoolant
Date:   2023-02-12 23:35:49 +13:00

7 changed files with 147 additions and 87 deletions


@@ -3,21 +3,23 @@
 !invokeai
 !ldm
 !pyproject.toml
+!README.md

 # Guard against pulling in any models that might exist in the directory tree
 **/*.pt*
 **/*.ckpt

 # ignore frontend but whitelist dist
-invokeai/frontend/**
-!invokeai/frontend/dist
+invokeai/frontend/
+!invokeai/frontend/dist/

 # ignore invokeai/assets but whitelist invokeai/assets/web
-invokeai/assets
-!invokeai/assets/web
+invokeai/assets/
+!invokeai/assets/web/

-# ignore python cache
-**/__pycache__
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
 **/*.py[cod]
-**/*.egg-info
+
+# Distribution / packaging
+*.egg-info/
+*.egg
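A quick way to sanity-check the tightened ignore rules is a throwaway build that only lists what actually reaches the build context; the temporary Dockerfile path below is arbitrary and not part of this change:

# Sketch: list the build context after .dockerignore filtering
cat > /tmp/context-check.Dockerfile <<'EOF'
FROM busybox
COPY . /context
RUN find /context -maxdepth 2 | sort
EOF
docker build --no-cache --progress=plain -f /tmp/context-check.Dockerfile .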


@@ -3,7 +3,8 @@ on:
   push:
     branches:
       - 'main'
-      - 'update/ci/*'
+      - 'update/ci/docker/*'
+      - 'update/docker/*'
     tags:
       - 'v*.*.*'
@@ -20,18 +21,15 @@ jobs:
         include:
           - flavor: amd
             pip-extra-index-url: 'https://download.pytorch.org/whl/rocm5.2'
-            dockerfile: docker/Dockerfile
-            platforms: linux/amd64,linux/arm64
           - flavor: cuda
             pip-extra-index-url: ''
-            dockerfile: docker/Dockerfile
-            platforms: linux/amd64,linux/arm64
           - flavor: cpu
             pip-extra-index-url: 'https://download.pytorch.org/whl/cpu'
-            dockerfile: docker/Dockerfile
-            platforms: linux/amd64,linux/arm64
     runs-on: ubuntu-latest
     name: ${{ matrix.flavor }}
+    env:
+      PLATFORMS: 'linux/amd64,linux/arm64'
+      DOCKERFILE: 'docker/Dockerfile'
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -41,7 +39,9 @@ jobs:
         uses: docker/metadata-action@v4
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
-          images: ghcr.io/${{ github.repository }}
+          images: |
+            ghcr.io/${{ github.repository }}
+            ${{ vars.DOCKERHUB_REPOSITORY }}
           tags: |
             type=ref,event=branch
             type=ref,event=tag
@@ -52,13 +52,14 @@ jobs:
           flavor: |
             latest=${{ matrix.flavor == 'cuda' && github.ref == 'refs/heads/main' }}
             suffix=-${{ matrix.flavor }},onlatest=false

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
         with:
-          platforms: ${{ matrix.platforms }}
+          platforms: ${{ env.PLATFORMS }}

       - name: Login to GitHub Container Registry
         if: github.event_name != 'pull_request'
@@ -68,25 +69,34 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      - name: Login to Docker Hub
+        if: github.event_name != 'pull_request' && vars.DOCKERHUB_REPOSITORY != ''
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
       - name: Build container
-        id: docker_build
         uses: docker/build-push-action@v4
         with:
           context: .
-          file: ${{ matrix.dockerfile }}
-          platforms: ${{ matrix.platforms }}
-          push: ${{ github.event_name != 'pull_request' }}
+          file: ${{ env.DOCKERFILE }}
+          platforms: ${{ env.PLATFORMS }}
+          push: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/tags/*' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           build-args: PIP_EXTRA_INDEX_URL=${{ matrix.pip-extra-index-url }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          cache-from: |
+            type=gha,scope=${{ github.ref_name }}-${{ matrix.flavor }}
+            type=gha,scope=main-${{ matrix.flavor }}
+          cache-to: type=gha,mode=max,scope=${{ github.ref_name }}-${{ matrix.flavor }}

-      - name: Output image, digest and metadata to summary
-        run: |
-          {
-            echo imageid: "${{ steps.docker_build.outputs.imageid }}"
-            echo digest: "${{ steps.docker_build.outputs.digest }}"
-            echo labels: "${{ steps.meta.outputs.labels }}"
-            echo tags: "${{ steps.meta.outputs.tags }}"
-            echo version: "${{ steps.meta.outputs.version }}"
-          } >> "$GITHUB_STEP_SUMMARY"
+      - name: Docker Hub Description
+        if: github.ref == 'refs/heads/main' || github.ref == 'refs/tags/*' && vars.DOCKERHUB_REPOSITORY != ''
+        uses: peter-evans/dockerhub-description@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          repository: ${{ vars.DOCKERHUB_REPOSITORY }}
+          short-description: ${{ github.event.repository.description }}
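With the platforms and Dockerfile path moved into job-level env vars, each matrix entry now only carries its flavor and pip index URL. A rough local equivalent of a single matrix leg might look like the sketch below (the tag name is a placeholder, and --load limits the build to one platform):

# Sketch: approximate the cpu leg of the workflow on a local machine
docker buildx build \
    --platform linux/amd64 \
    --file docker/Dockerfile \
    --build-arg PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" \
    --tag invokeai:cpu \
    --load \
    .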


@@ -8,6 +8,7 @@ on:
       - 'ready_for_review'
       - 'opened'
       - 'synchronize'
+  merge_group:
   workflow_dispatch:

 concurrency:


@@ -1,57 +1,63 @@
 # syntax=docker/dockerfile:1

 ARG PYTHON_VERSION=3.9

 ##################
 ## base image ##
 ##################
 FROM python:${PYTHON_VERSION}-slim AS python-base
+LABEL org.opencontainers.image.authors="mauwii@outlook.de"

 # prepare for buildkit cache
-RUN rm -f /etc/apt/apt.conf.d/docker-clean
+RUN rm -f /etc/apt/apt.conf.d/docker-clean \
+    && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' >/etc/apt/apt.conf.d/keep-cache

 # Install necesarry packages
 RUN \
     --mount=type=cache,target=/var/cache/apt,sharing=locked \
+    --mount=type=cache,target=/var/lib/apt,sharing=locked \
     apt-get update \
-    && apt-get install \
-        -yqq \
+    && apt-get install -y \
         --no-install-recommends \
         libgl1-mesa-glx=20.3.* \
         libglib2.0-0=2.66.* \
-        libopencv-dev=4.5.* \
-    && rm -rf /var/lib/apt/lists/*
+        libopencv-dev=4.5.*

-# set working directory and path
+# set working directory and env
 ARG APPDIR=/usr/src
 ARG APPNAME=InvokeAI
 WORKDIR ${APPDIR}
-ENV PATH=${APPDIR}/${APPNAME}/bin:$PATH
+ENV PATH ${APPDIR}/${APPNAME}/bin:$PATH
+# Keeps Python from generating .pyc files in the container
+ENV PYTHONDONTWRITEBYTECODE 1
+# Turns off buffering for easier container logging
+ENV PYTHONUNBUFFERED 1
+# don't fall back to legacy build system
+ENV PIP_USE_PEP517=1

 #######################
 ## build pyproject ##
 #######################
 FROM python-base AS pyproject-builder
-ENV PIP_USE_PEP517=1

-# prepare for buildkit cache
+# Install dependencies
+RUN \
+    --mount=type=cache,target=/var/cache/apt,sharing=locked \
+    --mount=type=cache,target=/var/lib/apt,sharing=locked \
+    apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        build-essential=12.9 \
+        gcc=4:10.2.* \
+        python3-dev=3.9.*
+
+# prepare pip for buildkit cache
 ARG PIP_CACHE_DIR=/var/cache/buildkit/pip
 ENV PIP_CACHE_DIR ${PIP_CACHE_DIR}
 RUN mkdir -p ${PIP_CACHE_DIR}

-# Install dependencies
-RUN \
-    --mount=type=cache,target=${PIP_CACHE_DIR} \
-    --mount=type=cache,target=/var/cache/apt,sharing=locked \
-    apt-get update \
-    && apt-get install \
-        -yqq \
-        --no-install-recommends \
-        build-essential=12.9 \
-        gcc=4:10.2.* \
-        python3-dev=3.9.* \
-    && rm -rf /var/lib/apt/lists/*
-
 # create virtual environment
-RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
+RUN --mount=type=cache,target=${PIP_CACHE_DIR},sharing=locked \
     python3 -m venv "${APPNAME}" \
         --upgrade-deps
@@ -61,9 +67,8 @@ COPY --link . .
 # install pyproject.toml
 ARG PIP_EXTRA_INDEX_URL
 ENV PIP_EXTRA_INDEX_URL ${PIP_EXTRA_INDEX_URL}
-ARG PIP_PACKAGE=.
-RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
-    "${APPDIR}/${APPNAME}/bin/pip" install ${PIP_PACKAGE}
+RUN --mount=type=cache,target=${PIP_CACHE_DIR},sharing=locked \
+    "${APPNAME}/bin/pip" install .

 # build patchmatch
 RUN python3 -c "from patchmatch import patch_match"
@@ -73,14 +78,26 @@ RUN python3 -c "from patchmatch import patch_match"
 #####################
 FROM python-base AS runtime

-# setup environment
-COPY --from=pyproject-builder --link ${APPDIR}/${APPNAME} ${APPDIR}/${APPNAME}
-ENV INVOKEAI_ROOT=/data
-ENV INVOKE_MODEL_RECONFIGURE="--yes --default_only"
+# Create a new User
+ARG UNAME=appuser
+RUN useradd \
+    --no-log-init \
+    -m \
+    -U \
+    "${UNAME}"

-# set Entrypoint and default CMD
+# create volume directory
+ARG VOLUME_DIR=/data
+RUN mkdir -p "${VOLUME_DIR}" \
+    && chown -R "${UNAME}" "${VOLUME_DIR}"
+
+# setup runtime environment
+USER ${UNAME}
+COPY --chown=${UNAME} --from=pyproject-builder ${APPDIR}/${APPNAME} ${APPNAME}
+ENV INVOKEAI_ROOT ${VOLUME_DIR}
+ENV TRANSFORMERS_CACHE ${VOLUME_DIR}/.cache
+ENV INVOKE_MODEL_RECONFIGURE "--yes --default_only"
+
+EXPOSE 9090
 ENTRYPOINT [ "invokeai" ]
-CMD [ "--web", "--host=0.0.0.0" ]
-VOLUME [ "/data" ]
-LABEL org.opencontainers.image.authors="mauwii@outlook.de"
+CMD [ "--web", "--host", "0.0.0.0", "--port", "9090" ]
+VOLUME [ "${VOLUME_DIR}" ]
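Because the runtime stage now creates and switches to a non-root user, a quick check against a locally built image could be the following (the image tag is a placeholder and the exact uid depends on the base image):

# Sketch: confirm the container does not run as root
docker run --rm --entrypoint id invokeai:cpu
# expected output along the lines of: uid=1000(appuser) gid=1000(appuser) groups=1000(appuser)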


@@ -1,19 +1,24 @@
 #!/usr/bin/env bash
 set -e

-# How to use: https://invoke-ai.github.io/InvokeAI/installation/INSTALL_DOCKER/#setup
-# Some possible pip extra-index urls (cuda 11.7 is available without extra url):
-# CUDA 11.6: https://download.pytorch.org/whl/cu116
-# ROCm 5.2: https://download.pytorch.org/whl/rocm5.2
-# CPU: https://download.pytorch.org/whl/cpu
-# as found on https://pytorch.org/get-started/locally/
+# If you want to build a specific flavor, set the CONTAINER_FLAVOR environment variable
+# e.g. CONTAINER_FLAVOR=cpu ./build.sh
+# Possible Values are:
+# - cpu
+# - cuda
+# - rocm
+# Don't forget to also set it when executing run.sh
+# if it is not set, the script will try to detect the flavor by itself.
+#
+# Doc can be found here:
+# https://invoke-ai.github.io/InvokeAI/installation/040_INSTALL_DOCKER/

-SCRIPTDIR=$(dirname "$0")
+SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}")
 cd "$SCRIPTDIR" || exit 1

 source ./env.sh

-DOCKERFILE=${INVOKE_DOCKERFILE:-Dockerfile}
+DOCKERFILE=${INVOKE_DOCKERFILE:-./Dockerfile}

 # print the settings
 echo -e "You are using these values:\n"
@@ -21,9 +26,10 @@ echo -e "Dockerfile:\t\t${DOCKERFILE}"
 echo -e "index-url:\t\t${PIP_EXTRA_INDEX_URL:-none}"
 echo -e "Volumename:\t\t${VOLUMENAME}"
 echo -e "Platform:\t\t${PLATFORM}"
-echo -e "Registry:\t\t${CONTAINER_REGISTRY}"
-echo -e "Repository:\t\t${CONTAINER_REPOSITORY}"
+echo -e "Container Registry:\t${CONTAINER_REGISTRY}"
+echo -e "Container Repository:\t${CONTAINER_REPOSITORY}"
 echo -e "Container Tag:\t\t${CONTAINER_TAG}"
+echo -e "Container Flavor:\t${CONTAINER_FLAVOR}"
 echo -e "Container Image:\t${CONTAINER_IMAGE}\n"

 # Create docker volume
@@ -36,8 +42,9 @@ fi

 # Build Container
 DOCKER_BUILDKIT=1 docker build \
-    --platform="${PLATFORM}" \
-    --tag="${CONTAINER_IMAGE}" \
+    --platform="${PLATFORM:-linux/amd64}" \
+    --tag="${CONTAINER_IMAGE:-invokeai}" \
+    ${CONTAINER_FLAVOR:+--build-arg="CONTAINER_FLAVOR=${CONTAINER_FLAVOR}"} \
    ${PIP_EXTRA_INDEX_URL:+--build-arg="PIP_EXTRA_INDEX_URL=${PIP_EXTRA_INDEX_URL}"} \
    ${PIP_PACKAGE:+--build-arg="PIP_PACKAGE=${PIP_PACKAGE}"} \
    --file="${DOCKERFILE}" \
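Following the new header comments in the build script, a typical invocation that pins the flavor instead of relying on auto-detection would be something like this (assuming the script lives under docker/, as the workflow's Dockerfile path suggests):

# Sketch: build a specific flavor explicitly, from the repository root
CONTAINER_FLAVOR=cpu ./docker/build.sh
# or, for an AMD GPU build:
CONTAINER_FLAVOR=rocm ./docker/build.sh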


@@ -1,19 +1,31 @@
 #!/usr/bin/env bash
+# This file is used to set environment variables for the build.sh and run.sh scripts.

+# Try to detect the container flavor if no PIP_EXTRA_INDEX_URL got specified
 if [[ -z "$PIP_EXTRA_INDEX_URL" ]]; then
+
+    # Activate virtual environment if not already activated and exists
+    if [[ -z $VIRTUAL_ENV ]]; then
+        [[ -e "$(dirname "${BASH_SOURCE[0]}")/../.venv/bin/activate" ]] \
+            && source "$(dirname "${BASH_SOURCE[0]}")/../.venv/bin/activate" \
+            && echo "Activated virtual environment: $VIRTUAL_ENV"
+    fi
+
     # Decide which container flavor to build if not specified
     if [[ -z "$CONTAINER_FLAVOR" ]] && python -c "import torch" &>/dev/null; then
         # Check for CUDA and ROCm
         CUDA_AVAILABLE=$(python -c "import torch;print(torch.cuda.is_available())")
         ROCM_AVAILABLE=$(python -c "import torch;print(torch.version.hip is not None)")
-        if [[ "$(uname -s)" != "Darwin" && "${CUDA_AVAILABLE}" == "True" ]]; then
+        if [[ "${CUDA_AVAILABLE}" == "True" ]]; then
             CONTAINER_FLAVOR="cuda"
-        elif [[ "$(uname -s)" != "Darwin" && "${ROCM_AVAILABLE}" == "True" ]]; then
+        elif [[ "${ROCM_AVAILABLE}" == "True" ]]; then
             CONTAINER_FLAVOR="rocm"
         else
             CONTAINER_FLAVOR="cpu"
         fi
     fi

     # Set PIP_EXTRA_INDEX_URL based on container flavor
     if [[ "$CONTAINER_FLAVOR" == "rocm" ]]; then
         PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/rocm"
@@ -26,9 +38,10 @@ fi

 # Variables shared by build.sh and run.sh
 REPOSITORY_NAME="${REPOSITORY_NAME-$(basename "$(git rev-parse --show-toplevel)")}"
-VOLUMENAME="${VOLUMENAME-"${REPOSITORY_NAME,,}_data"}"
+REPOSITORY_NAME="${REPOSITORY_NAME,,}"
+VOLUMENAME="${VOLUMENAME-"${REPOSITORY_NAME}_data"}"
 ARCH="${ARCH-$(uname -m)}"
-PLATFORM="${PLATFORM-Linux/${ARCH}}"
+PLATFORM="${PLATFORM-linux/${ARCH}}"
 INVOKEAI_BRANCH="${INVOKEAI_BRANCH-$(git branch --show)}"
 CONTAINER_REGISTRY="${CONTAINER_REGISTRY-"ghcr.io"}"
 CONTAINER_REPOSITORY="${CONTAINER_REPOSITORY-"$(whoami)/${REPOSITORY_NAME}"}"
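Since env.sh now lowercases REPOSITORY_NAME before deriving the volume, platform, and image names, sourcing it in a shell is a simple way to see what build.sh and run.sh will use; the variable names below are the ones printed by build.sh above:

# Sketch: inspect the derived defaults without building anything
source docker/env.sh
echo "flavor:   ${CONTAINER_FLAVOR}"
echo "platform: ${PLATFORM}"
echo "volume:   ${VOLUMENAME}"
echo "image:    ${CONTAINER_IMAGE}"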


@@ -1,14 +1,16 @@
 #!/usr/bin/env bash
 set -e

-# How to use: https://invoke-ai.github.io/InvokeAI/installation/INSTALL_DOCKER/#run-the-container
-# IMPORTANT: You need to have a token on huggingface.co to be able to download the checkpoints!!!
+# How to use: https://invoke-ai.github.io/InvokeAI/installation/040_INSTALL_DOCKER/

-SCRIPTDIR=$(dirname "$0")
+SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}")
 cd "$SCRIPTDIR" || exit 1

 source ./env.sh

+# Create outputs directory if it does not exist
+[[ -d ./outputs ]] || mkdir ./outputs
+
 echo -e "You are using these values:\n"
 echo -e "Volumename:\t${VOLUMENAME}"
 echo -e "Invokeai_tag:\t${CONTAINER_IMAGE}"
@@ -22,10 +24,18 @@ docker run \
     --name="${REPOSITORY_NAME,,}" \
     --hostname="${REPOSITORY_NAME,,}" \
     --mount=source="${VOLUMENAME}",target=/data \
-    ${MODELSPATH:+-u "$(id -u):$(id -g)"} \
+    --mount type=bind,source="$(pwd)"/outputs,target=/data/outputs \
     ${MODELSPATH:+--mount="type=bind,source=${MODELSPATH},target=/data/models"} \
     ${HUGGING_FACE_HUB_TOKEN:+--env="HUGGING_FACE_HUB_TOKEN=${HUGGING_FACE_HUB_TOKEN}"} \
     --publish=9090:9090 \
     --cap-add=sys_nice \
     ${GPU_FLAGS:+--gpus="${GPU_FLAGS}"} \
-    "${CONTAINER_IMAGE}" ${1:+$@}
+    "${CONTAINER_IMAGE}" ${@:+$@}
+
+# Remove Trash folder
+for f in outputs/.Trash*; do
+    if [ -e "$f" ]; then
+        rm -Rf "$f"
+        break
+    fi
+done
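Everything passed to run.sh is forwarded after the image name, which replaces the Dockerfile's default CMD, so alternative invokeai flags can be supplied directly; the example below simply repeats the default CMD from the Dockerfile above:

# Sketch: override the container command via run.sh's argument passthrough
./docker/run.sh --web --host 0.0.0.0 --port 9090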