Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
more bug fixes to install scripts
This commit is contained in:
parent ffa91be3f1
commit b8870d8290
@@ -11,6 +11,7 @@ dependencies:
 - numpy=1.19
 - imageio=2.9.0
 - opencv=4.6.0
+- getpass_asterisk
 - pillow=8.*
 - flask=2.1.*
 - flask_cors=3.0.10
@@ -22,6 +22,7 @@ dependencies:
 - diffusers=0.6.0
 - einops=0.4.1
 - grpcio=1.46.4
+- getpass_asterisk
 - humanfriendly=10.0
 - imageio=2.21.2
 - imageio-ffmpeg=0.4.7
@@ -34,6 +34,7 @@ dependencies:
 - flask_cors==3.0.10
 - dependency_injector==4.40.0
 - eventlet
+- getpass_asterisk
 - kornia==0.6.0
 - -e git+https://github.com/openai/CLIP.git@main#egg=clip
 - -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
@@ -99,7 +99,7 @@ if "%ERRORLEVEL%" NEQ "0" (
     echo ""
     echo "The preload_models.py script crashed or was cancelled."
     echo "InvokeAI is not ready to run. To run preload_models.py again,"
-    echo "run the command 'python scripts/preload_models.py'"
+    echo "run the command 'update.bat' in this directory."
     echo "Press any key to continue"
     pause
     exit /b
@@ -119,8 +119,8 @@ else
 if test $status -ne 0
 then
     echo "The preload_models.py script crashed or was cancelled."
-    echo "InvokeAI is not ready to run. To run preload_models.py again,"
-    echo "give the command 'python scripts/preload_models.py'"
+    echo "InvokeAI is not ready to run. Try again by running"
+    echo "update.sh in this directory."
 else
     # tell the user their next steps
     echo "You can now start generating images by running invoke.sh (inside this folder), using ./invoke.sh"
@@ -2,6 +2,7 @@
 
 albumentations
 einops
+getpass_asterisk
 huggingface-hub
 imageio-ffmpeg
 imageio
@@ -14,9 +14,10 @@ import warnings
 from urllib import request
 from tqdm import tqdm
 from omegaconf import OmegaConf
 from huggingface_hub import HfFolder, hf_hub_url
 from pathlib import Path
+from getpass_asterisk import getpass_asterisk
 import traceback
 import getpass
 import requests
 import clip
 import transformers
@@ -226,10 +227,13 @@ This involves a few easy steps.
 '''
 )
 input('Press <enter> when you are ready to continue:')
 
 from huggingface_hub import HfFolder
 print('(Fetching Hugging Face token from cache...',end='')
 access_token = HfFolder.get_token()
 if access_token is not None:
     print('found')
 
 if access_token is None:
     print('not found')
     print('''
 4. Thank you! The last step is to enter your HuggingFace access token so that
 this script is authorized to initiate the download. Go to the access tokens
@@ -243,7 +247,7 @@ This involves a few easy steps.
 
 Now copy the token to your clipboard and paste it here: '''
 )
-    access_token = getpass.getpass()
+    access_token = getpass_asterisk.getpass_asterisk()
     return access_token
 
 #---------------------------------------------
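The call swap above is the user-visible half of the getpass_asterisk change: the token prompt now echoes an asterisk per keystroke instead of reading silently, which is also why the package was added to every environment and requirements file earlier in this commit. Below is a minimal sketch of the new behavior, mirroring the import and call style shown in the diff; the try/except fallback to plain getpass is an illustrative assumption, not something the commit does.

# Sketch: prompt for a token, echoing asterisks when getpass_asterisk is available.
try:
    from getpass_asterisk import getpass_asterisk

    def prompt_for_token() -> str:
        # prompts on the terminal and echoes '*' for each typed character
        return getpass_asterisk.getpass_asterisk()
except ImportError:
    import getpass

    def prompt_for_token() -> str:
        # silent input, as preload_models.py behaved before this commit
        return getpass.getpass()

if __name__ == '__main__':
    token = prompt_for_token()
    print(f'Received a token of length {len(token)}')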
@@ -261,7 +265,6 @@ def migrate_models_ckpt():
 
 #---------------------------------------------
 def download_weight_datasets(models:dict, access_token:str):
-    from huggingface_hub import HfFolder
     migrate_models_ckpt()
     successful = dict()
     for mod in models.keys():
@@ -275,9 +278,11 @@ def download_weight_datasets(models:dict, access_token:str):
         if success:
             successful[mod] = True
     if len(successful) < len(models):
-        print(f'\n* There were errors downloading one or more files.')
-        print('Please double-check your license agreements, and your access token. Type ^C to quit.\n')
-        hfFolder.delete_token()
+        print(f'\n\n** There were errors downloading one or more files. **')
+        print('Please double-check your license agreements, and your access token.')
+        HfFolder.delete_token()
+        print('Press any key to try again. Type ^C to quit.\n')
+        input()
         return None
 
     HfFolder.save_token(access_token)
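Two things change in the hunk above: the failure branch now clears the cached token with HfFolder.delete_token() (the removed hfFolder.delete_token() referenced a lowercase name that does not exist, so the old error path would itself raise a NameError), and it pauses for a keypress before the caller retries. Below is a rough sketch of the token-cache round trip these hunks rely on, assuming the HfFolder helper shipped with huggingface_hub at the time (get_token / save_token / delete_token); prompt_for_token is a hypothetical stand-in for the script's interactive prompt.

from huggingface_hub import HfFolder

def get_or_prompt_token(prompt_for_token) -> str:
    # get_token() returns None when no token has been cached yet
    token = HfFolder.get_token()
    if token is None:
        token = prompt_for_token()
    return token

def on_failed_downloads() -> None:
    # A failed batch invalidates the cached token so the user can re-enter it,
    # then waits for a keypress before the outer loop asks again.
    HfFolder.delete_token()
    input('Press any key to try again. Type ^C to quit.\n')

def on_successful_downloads(token: str) -> None:
    # Only a token that actually worked is persisted for the next run.
    HfFolder.save_token(token)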
@@ -287,8 +292,6 @@ def download_weight_datasets(models:dict, access_token:str):
 
 #---------------------------------------------
 def download_with_resume(repo_id:str, model_name:str, access_token:str)->bool:
-    from huggingface_hub import hf_hub_url
-
     model_dest = os.path.join(Model_dir, model_name)
     os.makedirs(os.path.dirname(model_dest), exist_ok=True)
     url = hf_hub_url(repo_id, model_name)
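With the local import gone, download_with_resume relies on the module-level hf_hub_url import added at the top of the file. As a reference for what the function's name implies, here is a condensed sketch of the usual resume pattern: build the file URL with hf_hub_url, then stream it with an HTTP Range header so an interrupted download can be continued rather than restarted. This is the general shape of such a helper under stated assumptions, not the repository's exact implementation; the Model_dir value and the header handling are placeholders.

import os
import requests
from tqdm import tqdm
from huggingface_hub import hf_hub_url

Model_dir = 'models'  # placeholder destination directory

def download_with_resume(repo_id: str, model_name: str, access_token: str) -> bool:
    model_dest = os.path.join(Model_dir, model_name)
    os.makedirs(os.path.dirname(model_dest), exist_ok=True)
    url = hf_hub_url(repo_id, model_name)

    # Resume from however many bytes are already on disk.
    resume_from = os.path.getsize(model_dest) if os.path.exists(model_dest) else 0
    headers = {'Authorization': f'Bearer {access_token}'} if access_token else {}
    if resume_from:
        headers['Range'] = f'bytes={resume_from}-'

    resp = requests.get(url, headers=headers, stream=True)
    if resp.status_code == 200:
        # server ignored the Range request; start over from byte zero
        resume_from = 0
    elif resp.status_code != 206:
        return False

    total = int(resp.headers.get('Content-Length', 0)) + resume_from
    mode = 'ab' if resume_from else 'wb'
    with open(model_dest, mode) as f, tqdm(total=total, initial=resume_from,
                                           unit='B', unit_scale=True) as bar:
        for chunk in resp.iter_content(chunk_size=1 << 20):
            f.write(chunk)
            bar.update(len(chunk))
    return True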
@@ -8,6 +8,12 @@ if exist ".git" (
     call git pull
 )
 
 conda env update
+conda activate invokeai
+python scripts/preload_models.py
 
 echo "Press any key to continue"
 pause
 exit 0