Merge branch 'development' into asymmetric-tiling

This commit is contained in:
Carson Katri 2022-10-18 17:27:29 -04:00 committed by GitHub
commit d93cd10b0d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 15 additions and 10 deletions

View File

@ -47,11 +47,11 @@ dependencies:
- dependency_injector==4.40.0 - dependency_injector==4.40.0
- eventlet==0.33.1 - eventlet==0.33.1
- opencv-python==4.6.0 - opencv-python==4.6.0
- protobuf==3.19.5 - protobuf==3.19.6
- realesrgan==0.2.5.0 - realesrgan==0.2.5.0
- send2trash==1.8.0 - send2trash==1.8.0
- test-tube==0.7.5 - test-tube==0.7.5
- transformers==4.21.2 - transformers==4.21.3
- torch-fidelity==0.3.0 - torch-fidelity==0.3.0
- -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers - -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
- -e git+https://github.com/openai/CLIP.git@main#egg=clip - -e git+https://github.com/openai/CLIP.git@main#egg=clip

View File

@ -15,7 +15,7 @@ dependencies:
- pudb==2019.2 - pudb==2019.2
- imageio==2.9.0 - imageio==2.9.0
- imageio-ffmpeg==0.4.2 - imageio-ffmpeg==0.4.2
- pytorch-lightning==1.4.2 - pytorch-lightning==1.7.7
- omegaconf==2.1.1 - omegaconf==2.1.1
- realesrgan==0.2.5.0 - realesrgan==0.2.5.0
- test-tube>=0.7.5 - test-tube>=0.7.5
@ -25,8 +25,8 @@ dependencies:
- einops==0.3.0 - einops==0.3.0
- pyreadline3 - pyreadline3
- torch-fidelity==0.3.0 - torch-fidelity==0.3.0
- transformers==4.19.2 - transformers==4.21.3
- torchmetrics==0.6.0 - torchmetrics==0.7.0
- flask==2.1.3 - flask==2.1.3
- flask_socketio==5.3.0 - flask_socketio==5.3.0
- flask_cors==3.0.10 - flask_cors==3.0.10

View File

@ -12,12 +12,12 @@ pillow==9.2.0
pudb==2019.2 pudb==2019.2
torch==1.12.1 torch==1.12.1
torchvision==0.13.0 torchvision==0.13.0
pytorch-lightning==1.4.2 pytorch-lightning==1.7.7
streamlit==1.12.0 streamlit==1.12.0
test-tube>=0.7.5 test-tube>=0.7.5
torch-fidelity==0.3.0 torch-fidelity==0.3.0
torchmetrics==0.6.0 torchmetrics==0.6.0
transformers==4.19.2 transformers==4.21.3
-e git+https://github.com/openai/CLIP.git@main#egg=clip -e git+https://github.com/openai/CLIP.git@main#egg=clip
-e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
-e git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion -e git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion

View File

@ -1,6 +1,6 @@
-r requirements.txt -r requirements.txt
protobuf==3.19.4 protobuf==3.19.6
torch torch
torchvision torchvision
-e . -e .

View File

@ -120,7 +120,12 @@ try:
from models.clipseg import CLIPDensePredT from models.clipseg import CLIPDensePredT
model = CLIPDensePredT(version='ViT-B/16', reduce_dim=64, ) model = CLIPDensePredT(version='ViT-B/16', reduce_dim=64, )
model.eval() model.eval()
model.load_state_dict(torch.load('src/clipseg/weights/rd64-uni-refined.pth'), strict=False) model.load_state_dict(
    torch.load('src/clipseg/weights/rd64-uni-refined.pth'),
    map_location=torch.device('cpu'),
    strict=False,
)
except Exception: except Exception:
print('Error installing clipseg model:') print('Error installing clipseg model:')
print(traceback.format_exc()) print(traceback.format_exc())

View File

@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup( setup(
name='invoke-ai', name='invoke-ai',
version='2.0.0', version='2.0.2',
description='', description='',
packages=find_packages(), packages=find_packages(),
install_requires=[ install_requires=[