Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
Commit 37e2418ee0
* Added Linux to the workflows
- rename workflow files
Signed-off-by: Ben Alkov <ben.alkov@gmail.com>
* Fixes: run on merge to 'main' and 'dev'
- reduce dev merge test cases to 1 (a single case takes 11 minutes 😯)
- fix model cache name
Signed-off-by: Ben Alkov <ben.alkov@gmail.com>
* add test prompts to workflows
Signed-off-by: Ben Alkov <ben.alkov@gmail.com>
Co-authored-by: James Reynolds <magnsuviri@me.com>
Co-authored-by: Ben Alkov <ben.alkov@gmail.com>
Co-authored-by: Lincoln Stein <lincoln.stein@gmail.com>
71 lines · 2.6 KiB · YAML
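# Manually triggered workflow that warms the GitHub Actions caches on both Ubuntu
# and macOS runners: the Stable Diffusion v1.4 checkpoint, the conda "ldm"
# environment, and the Hugging Face / Torch models fetched by scripts/preload_models.py.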
name: Create Caches
on:
  workflow_dispatch
jobs:
  build:
    strategy:
      matrix:
        os: [ ubuntu-latest, macos-12 ]
    name: Create Caches on ${{ matrix.os }} conda
    runs-on: ${{ matrix.os }}
    steps:
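      # Pick the conda environment file and python interpreter path for the current
      # runner OS; later steps read them back via steps.vars.outputs.*. (The
      # ::set-output command used here has since been deprecated by GitHub in
      # favour of writing to $GITHUB_OUTPUT.)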
      - name: Set platform variables
        id: vars
        run: |
          if [ "$RUNNER_OS" = "macOS" ]; then
            echo "::set-output name=ENV_FILE::environment-mac.yaml"
            echo "::set-output name=PYTHON_BIN::/usr/local/miniconda/envs/ldm/bin/python"
          elif [ "$RUNNER_OS" = "Linux" ]; then
            echo "::set-output name=ENV_FILE::environment.yaml"
            echo "::set-output name=PYTHON_BIN::/usr/share/miniconda/envs/ldm/bin/python"
          fi
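      # Check out the repository so the environment files and scripts referenced
      # below are present in the workspace.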
      - name: Checkout sources
        uses: actions/checkout@v3
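      # Restore the Stable Diffusion v1.4 checkpoint from the Actions cache. The
      # download step that follows runs only on a cache miss and fetches the
      # checkpoint from the URL stored in the SD_V1_4_URL secret.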
      - name: Use Cached Stable Diffusion v1.4 Model
        id: cache-sd-v1-4
        uses: actions/cache@v3
        env:
          cache-name: cache-sd-v1-4
        with:
          path: models/ldm/stable-diffusion-v1/model.ckpt
          key: ${{ env.cache-name }}
          restore-keys: |
            ${{ env.cache-name }}
      - name: Download Stable Diffusion v1.4 Model
        if: ${{ steps.cache-sd-v1-4.outputs.cache-hit != 'true' }}
        run: |
          if [ ! -e models/ldm/stable-diffusion-v1 ]; then
            mkdir -p models/ldm/stable-diffusion-v1
          fi
          if [ ! -e models/ldm/stable-diffusion-v1/model.ckpt ]; then
            curl -o models/ldm/stable-diffusion-v1/model.ckpt ${{ secrets.SD_V1_4_URL }}
          fi
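      # Cache the conda "ldm" environment at ~/.conda/envs/ldm. The fallback
      # restore key folds in the runner OS and a hash of the selected environment
      # file; the install step below is skipped only on an exact primary-key hit.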
      - name: Use Cached Dependencies
        id: cache-conda-env-ldm
        uses: actions/cache@v3
        env:
          cache-name: cache-conda-env-ldm
        with:
          path: ~/.conda/envs/ldm
          key: ${{ env.cache-name }}
          restore-keys: |
            ${{ env.cache-name }}-${{ runner.os }}-${{ hashFiles(steps.vars.outputs.ENV_FILE) }}
      - name: Install Dependencies
        if: ${{ steps.cache-conda-env-ldm.outputs.cache-hit != 'true' }}
        run: |
          conda env create -f ${{ steps.vars.outputs.ENV_FILE }}
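      # Cache ~/.cache, which holds the Hugging Face and Torch downloads made by
      # scripts/preload_models.py; the preload step below runs only when the
      # primary key missed.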
      - name: Use Cached Huggingface and Torch models
        id: cache-huggingface-torch
        uses: actions/cache@v3
        env:
          cache-name: cache-huggingface-torch
        with:
          path: ~/.cache
          key: ${{ env.cache-name }}
          restore-keys: |
            ${{ env.cache-name }}-${{ hashFiles('scripts/preload_models.py') }}
      - name: Download Huggingface and Torch models
        if: ${{ steps.cache-huggingface-torch.outputs.cache-hit != 'true' }}
        run: |
          ${{ steps.vars.outputs.PYTHON_BIN }} scripts/preload_models.py
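Because the only trigger is workflow_dispatch, this workflow never runs on push or
pull request; it has to be started from the repository's Actions tab or from the
GitHub CLI. A minimal sketch, assuming the gh CLI is installed and authenticated
against this repository:

    gh workflow run "Create Caches"   # optionally add --ref <branch>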