Merge branch 'spezialspezial-patch-9' into development

This commit is contained in:
Lincoln Stein
2022-11-02 18:35:00 -04:00
21 changed files with 2768 additions and 136 deletions

View File

@ -14,18 +14,21 @@ import warnings
from urllib import request
from tqdm import tqdm
from omegaconf import OmegaConf
from huggingface_hub import HfFolder, hf_hub_url
from pathlib import Path
from getpass_asterisk import getpass_asterisk
import traceback
import getpass
import requests
import clip
import transformers
import torch
transformers.logging.set_verbosity_error()
# deferred loading so that help message can be printed quickly
def load_libs():
    """Deferred loading of heavy libraries so the help message can be printed quickly.

    Currently a no-op stub: the imports at module top already load everything,
    and this placeholder is kept so the `load_libs()` call site in `__main__`
    remains valid.  NOTE(review): presumably heavy imports were meant to move
    in here — confirm against the call site before relying on laziness.
    """
    pass
import warnings
warnings.filterwarnings('ignore')
#warnings.simplefilter('ignore')
#warnings.filterwarnings('ignore',category=DeprecationWarning)
#warnings.filterwarnings('ignore',category=UserWarning)
#--------------------------globals--
Model_dir = './models/ldm/stable-diffusion-v1/'
@ -98,7 +101,7 @@ this program and resume later.\n'''
#--------------------------------------------
def postscript():
print(
'''You're all set! You may now launch InvokeAI using one of these two commands:
'''\n** Model Installation Successful **\nYou're all set! You may now launch InvokeAI using one of these two commands:
Web version:
python scripts/invoke.py --web (connect to http://localhost:9090)
@ -220,10 +223,18 @@ This involves a few easy steps.
'''
)
input('Press <enter> when you are ready to continue:')
<<<<<<< HEAD
from huggingface_hub import HfFolder
=======
print('(Fetching Hugging Face token from cache...',end='')
>>>>>>> spezialspezial-patch-9
access_token = HfFolder.get_token()
if access_token is not None:
print('found')
if access_token is None:
print('not found')
print('''
4. Thank you! The last step is to enter your HuggingFace access token so that
this script is authorized to initiate the download. Go to the access tokens
@ -237,8 +248,7 @@ This involves a few easy steps.
Now copy the token to your clipboard and paste it here: '''
)
access_token = getpass.getpass()
HfFolder.save_token(access_token)
access_token = getpass_asterisk.getpass_asterisk()
return access_token
#---------------------------------------------
@ -268,14 +278,26 @@ def download_weight_datasets(models:dict, access_token:str):
)
if success:
successful[mod] = True
if len(successful) < len(models):
print(f'\n\n** There were errors downloading one or more files. **')
print('Please double-check your license agreements, and your access token.')
HfFolder.delete_token()
print('Press any key to try again. Type ^C to quit.\n')
input()
return None
HfFolder.save_token(access_token)
keys = ', '.join(successful.keys())
print(f'Successfully installed {keys}')
return successful
#---------------------------------------------
def download_with_resume(repo_id:str, model_name:str, access_token:str)->bool:
<<<<<<< HEAD
from huggingface_hub import hf_hub_url
=======
>>>>>>> spezialspezial-patch-9
model_dest = os.path.join(Model_dir, model_name)
os.makedirs(os.path.dirname(model_dest), exist_ok=True)
url = hf_hub_url(repo_id, model_name)
@ -295,6 +317,8 @@ def download_with_resume(repo_id:str, model_name:str, access_token:str)->bool:
if resp.status_code==416: # "range not satisfiable", which means nothing to return
print(f'* {model_name}: complete file found. Skipping.')
return True
elif resp.status_code != 200:
print(f'** An error occurred during downloading {model_name}: {resp.reason}')
elif exist_size > 0:
print(f'* {model_name}: partial file found. Resuming...')
else:
@ -302,7 +326,7 @@ def download_with_resume(repo_id:str, model_name:str, access_token:str)->bool:
try:
if total < 2000:
print(f'* {model_name}: {resp.text}')
print(f'*** ERROR DOWNLOADING {model_name}: {resp.text}')
return False
with open(model_dest, open_mode) as file, tqdm(
@ -329,7 +353,11 @@ def update_config_file(successfully_downloaded:dict,opt:dict):
try:
if os.path.exists(Config_file):
<<<<<<< HEAD
print(f'** {Config_file} exists. Renaming to {Config_file}.orig')
=======
print(f'* {Config_file} exists. Renaming to {Config_file}.orig')
>>>>>>> spezialspezial-patch-9
os.rename(Config_file,f'{Config_file}.orig')
tmpfile = os.path.join(os.path.dirname(Config_file),'new_config.tmp')
with open(tmpfile, 'w') as outfile:
@ -383,26 +411,35 @@ def new_config_file_contents(successfully_downloaded:dict, Config_file:str)->str
# this will preload the Bert tokenizer files
def download_bert():
    """Preload the Bert tokenizer files into the local Hugging Face cache.

    Downloads `bert-base-uncased` via `BertTokenizerFast.from_pretrained`,
    suppressing the DeprecationWarnings that transformers emits during the
    fetch.  Prints progress so the user sees activity during the download.

    NOTE(review): this span contained unresolved merge-conflict markers
    (<<<<<<< HEAD vs >>>>>>> spezialspezial-patch-9).  Resolved in favor of
    the patch-9 side: the transformers import belongs inside the
    warnings-suppression context (as the surviving lines below the marker
    already showed), and stdout is flushed so the progress text appears
    before the long download begins.
    """
    print('Installing bert tokenizer (ignore deprecation errors)...', end='')
    sys.stdout.flush()
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=DeprecationWarning)
        # deferred import: transformers is slow to load and noisy at import time
        from transformers import BertTokenizerFast, AutoFeatureExtractor
        tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')
    print('...success')
    sys.stdout.flush()
#---------------------------------------------
# this will download requirements for Kornia
def download_kornia():
    """Import kornia once so it downloads/caches any requirements it needs.

    Prints progress and flushes stdout so the message is visible before the
    potentially slow first import.

    NOTE(review): this span interleaved pre- and post-diff lines (two print
    calls and two `import kornia` statements from a marker-stripped diff).
    Reconstructed as the post-change version: the updated progress message,
    an explicit flush, and a bare import without the old
    warnings.catch_warnings() wrapper.
    """
    print('Installing Kornia requirements (ignore deprecation errors)...', end='')
    sys.stdout.flush()
    import kornia  # side effect only: triggers kornia's own requirement setup
    print('...success')
#---------------------------------------------
def download_clip():
print('Loading CLIP model...',end='')
<<<<<<< HEAD
from transformers import CLIPTokenizer, CLIPTextModel
=======
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
from transformers import CLIPTokenizer, CLIPTextModel
>>>>>>> spezialspezial-patch-9
sys.stdout.flush()
version = 'openai/clip-vit-large-patch14'
tokenizer = CLIPTokenizer.from_pretrained(version)
@ -531,7 +568,6 @@ if __name__ == '__main__':
default='./configs/models.yaml',
help='path to configuration file to create')
opt = parser.parse_args()
load_libs()
try:
if opt.interactive: