diff --git a/environment-mac.yml b/environment-mac.yml index 535957122a..9b43fd0978 100644 --- a/environment-mac.yml +++ b/environment-mac.yml @@ -47,11 +47,11 @@ dependencies: - dependency_injector==4.40.0 - eventlet==0.33.1 - opencv-python==4.6.0 - - protobuf==3.19.5 + - protobuf==3.19.6 - realesrgan==0.2.5.0 - send2trash==1.8.0 - test-tube==0.7.5 - - transformers==4.21.2 + - transformers==4.21.3 - torch-fidelity==0.3.0 - -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers - -e git+https://github.com/openai/CLIP.git@main#egg=clip diff --git a/environment.yml b/environment.yml index 14e599fa20..72468067e4 100644 --- a/environment.yml +++ b/environment.yml @@ -15,7 +15,7 @@ dependencies: - pudb==2019.2 - imageio==2.9.0 - imageio-ffmpeg==0.4.2 - - pytorch-lightning==1.4.2 + - pytorch-lightning==1.7.7 - omegaconf==2.1.1 - realesrgan==0.2.5.0 - test-tube>=0.7.5 @@ -25,8 +25,8 @@ dependencies: - einops==0.3.0 - pyreadline3 - torch-fidelity==0.3.0 - - transformers==4.19.2 - - torchmetrics==0.6.0 + - transformers==4.21.3 + - torchmetrics==0.7.0 - flask==2.1.3 - flask_socketio==5.3.0 - flask_cors==3.0.10 diff --git a/requirements-linux-arm64.txt b/requirements-linux-arm64.txt index 2b94a1f6c5..3a6ab888c9 100644 --- a/requirements-linux-arm64.txt +++ b/requirements-linux-arm64.txt @@ -12,12 +12,12 @@ pillow==9.2.0 pudb==2019.2 torch==1.12.1 torchvision==0.13.0 -pytorch-lightning==1.4.2 +pytorch-lightning==1.7.7 streamlit==1.12.0 test-tube>=0.7.5 torch-fidelity==0.3.0 torchmetrics==0.6.0 -transformers==4.19.2 +transformers==4.21.3 -e git+https://github.com/openai/CLIP.git@main#egg=clip -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers -e git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion diff --git a/requirements-mac-MPS-CPU.txt b/requirements-mac-MPS-CPU.txt index 3ea189c480..cba7563d6f 100644 --- a/requirements-mac-MPS-CPU.txt +++ b/requirements-mac-MPS-CPU.txt @@ -1,6 +1,6 @@ -r 
requirements.txt -protobuf==3.19.4 +protobuf==3.19.6 torch torchvision -e . diff --git a/scripts/preload_models.py b/scripts/preload_models.py index 2ef344f8c3..b23bec11f3 100644 --- a/scripts/preload_models.py +++ b/scripts/preload_models.py @@ -120,7 +120,12 @@ try: from models.clipseg import CLIPDensePredT model = CLIPDensePredT(version='ViT-B/16', reduce_dim=64, ) model.eval() - model.load_state_dict(torch.load('src/clipseg/weights/rd64-uni-refined.pth'), strict=False) + model.load_state_dict( + torch.load('src/clipseg/weights/rd64-uni-refined.pth', + map_location=torch.device('cpu'), + ), + strict=False, + ) except Exception: print('Error installing clipseg model:') print(traceback.format_exc()) diff --git a/setup.py b/setup.py index cc4a1f7237..a544aa150a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name='invoke-ai', - version='2.0.0', + version='2.0.2', description='', packages=find_packages(), install_requires=[