forked from AUTOMATIC1111/stable-diffusion-webui
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathaurobit_init.py
More file actions
200 lines (160 loc) · 8.5 KB
/
aurobit_init.py
File metadata and controls
200 lines (160 loc) · 8.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
import urllib.request
import os
import argparse
import shutil
import time
from git import repo
# ---------------------------------------------------------------------------
# Command-line options (both accept 'yes'/'no', default 'yes'):
#   -cache : stage downloads in the shared cache dir, then copy into place
#   -s3    : fetch from the S3 mirror instead of Tencent COS
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('-cache', default='yes', help="Cache downloaded resources under the shared cache dir ('yes'/'no')")
parser.add_argument('-s3', default='yes', help="Download from the S3 mirror instead of Tencent COS ('yes'/'no')")
args = parser.parse_args()
# The comparison already yields a bool; the original
# `True if ... else False` wrapper was redundant.
cache_resource = args.cache == 'yes'
use_s3 = args.s3 == 'yes'
print(f"Cache resource: {cache_resource}")
print(f"Use S3: {use_s3}")
# Shared cache location (persists across container restarts) and the number
# of times the whole setup sequence below is retried on failure.
cache_dir = "/workspace/sd_resource"
failed_retry = 5
# Download manifest: two mirrors ("tencent" COS and "s3" AWS) hosting the same
# set of resources, grouped by destination category. The category keys must
# match the lookups done in the download section below
# (sd_models / sd_lora / controlnet_models / ad_ext_models / sd_embeddings).
resource_path = {
    "tencent": {
        # Stable Diffusion checkpoints -> models/Stable-diffusion
        "sd_models": [
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_models/beautifulRealistic_brav5.safetensors",
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_models/majicmixRealistic_v5.safetensors",
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_models/realisticVisionV51_v51VAE-inpainting.safetensors"
        ],
        # LoRA weights -> models/Lora
        "sd_lora": ["https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_lora/meiyan_V1.safetensors"],
        # ControlNet checkpoints -> extensions/sd-webui-controlnet/models
        "controlnet_models": [
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/controlnet_models/control_v11f1p_sd15_depth.pth",
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/controlnet_models/control_v11p_sd15_canny.pth"
        ],
        # adetailer extension models -> models/adetailer
        "ad_ext_models": [
            "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_other/sow_pyramid_a5_e3d2_remapped.pth"
        ],
        # Textual-inversion embeddings -> embeddings/
        "sd_embeddings":["https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_other/AS-YoungV2.pt",
                         "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_other/AS-YoungerV2.pt",
                         "https://aiyo-1319341997.cos.ap-nanjing.myqcloud.com/common_resource/sd_other/AS-YoungestV2.pt"
                         ]
    },
    # Same files mirrored on AWS S3 (note: key layout differs slightly,
    # e.g. sd_lora vs lora, sd_other vs sd_others).
    "s3": {
        "sd_models": [
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_models/beautifulRealistic_brav5.safetensors",
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_models/majicmixRealistic_v5.safetensors",
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_models/realisticVisionV51_v51VAE-inpainting.safetensors"
        ],
        "sd_lora": ["https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/lora/meiyan_V1.safetensors"],
        "controlnet_models": [
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/controlnet/control_v11f1p_sd15_depth.pth",
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/controlnet/control_v11p_sd15_canny.pth"
        ],
        "ad_ext_models": [
            "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_others/sow_pyramid_a5_e3d2_remapped.pth"
        ],
        "sd_embeddings":["https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_others/AS-YoungerV2.pt",
                         "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_others/AS-YoungestV2.pt",
                         "https://aurobit-s3-01.s3.ap-northeast-1.amazonaws.com/common_resource/sd/sd_others/AS-YoungV2.pt"
                         ]
    },
}
def _fetch(url, dest_path, tag):
    """Download *url* to *dest_path* unless it already exists; log timing.

    *tag* is the log prefix (e.g. '[Download to cache]') so cached and
    direct downloads keep their distinct messages.
    """
    if os.path.exists(dest_path):
        print(f'{tag} file exists: {dest_path}')
        return
    _start_time = time.time()
    urllib.request.urlretrieve(url, dest_path)
    _end_time = time.time()
    print(f'{tag} {url} -> {dest_path}')
    print("Elapsed time: {:.2f} seconds".format(_end_time - _start_time))


def ab_download_resource(url, file_dir):
    """Fetch *url* into *file_dir*, optionally via the shared cache.

    Behaviour depends on the module-level ``cache_resource`` flag:
      * True  -> download into ``cache_dir``/<file_dir> first (skipped if
                 already cached), then copy the cached file into *file_dir*.
      * False -> download straight into *file_dir*.

    Files already present at the destination are never re-downloaded or
    overwritten. Raises whatever ``urllib.request.urlretrieve`` raises on
    network failure (callers retry the whole sequence).
    """
    base_name = os.path.basename(url)
    # exist_ok avoids the check-then-create race the original code had.
    os.makedirs(file_dir, exist_ok=True)

    if cache_resource:
        # Stage the file in the shared cache, then copy into place.
        file_cache_dir = f'{cache_dir}/{file_dir}'
        file_cache_path = f'{file_cache_dir}/{base_name}'
        os.makedirs(file_cache_dir, exist_ok=True)
        _fetch(url, file_cache_path, '[Download to cache]')

        file_path = f'{file_dir}/{base_name}'
        if not os.path.exists(file_path):
            shutil.copy(file_cache_path, file_path)
            print(f'[Copy from cache] {file_cache_path} -> {file_path}')
        else:
            print(f'[Copy from cache] file exists: {file_path}')
    else:
        # No cache: download straight to the destination.
        _fetch(url, f'{file_dir}/{base_name}', '[Download directly]')
# ===========================================================================
# Main setup sequence: clone the required webui extensions, then download all
# model resources.  The whole sequence is retried up to `failed_retry` times
# on unexpected failure; `break` exits the loop after a fully successful
# pass.  NOTE(review): if every retry fails the script still exits with
# status 0 — confirm callers don't rely on the exit code.
# ===========================================================================
try_cnt = failed_retry
while try_cnt > 0:
    try_cnt -= 1
    try:
        # ==================== prepare extensions ============================
        # repo name -> [clone URL, commit to pin ("" = stay on default branch)]
        ext_git_path = {
            "sd-webui-controlnet": ["https://github.com/Mikubill/sd-webui-controlnet", "c1f3d6f8505074d73b13eaf67f331bf869d7b940"],  # controlnet
            # "adetailer": "https://github.com/Bing-su/adetailer.git",  # adetailer (upstream)
            "adetailer": ["https://github.com/AuroBit/adetailer.git", ""],
            # "sd-weibui-inpaint-anything": "https://github.com/Uminosachi/sd-webui-inpaint-anything.git"
            # "sd-webui-animatediff": "https://github.com/continue-revolution/sd-webui-animatediff.git"
        }
        ext_path = 'extensions'
        for repo_name, rep_info in ext_git_path.items():
            rep_path, rep_cm = rep_info
            try:
                local_path = f'{ext_path}/{repo_name}'
                print(f'Cloning repo from: {rep_path}')
                print(f' to: {local_path}')
                repo.Repo.clone_from(rep_path, local_path)
                if rep_cm:  # pin to the requested commit on a local work branch
                    cur_rep = repo.Repo(local_path)
                    new_branch = cur_rep.create_head("aurobit_work", rep_cm)
                    cur_rep.head.reference = new_branch
                    cur_rep.head.reset(index=True, working_tree=True)
            except Exception as e:
                # Best-effort: an existing checkout raises here, so a failed
                # clone must not abort the whole setup pass.
                print(f'Clone {rep_path} FAIL. repo exist or network error.')
                print(e.args)
            else:
                # Bug fix: the original printed DONE unconditionally, even
                # right after logging a clone failure.
                print(f'Clone repo DONE: {rep_path}')
        # ====================== prepare resource =================================
        # Pick the mirror selected on the command line.
        all_resource = resource_path["s3"] if use_s3 else resource_path["tencent"]
        sd_path = 'models/Stable-diffusion'
        sd_lora_path = 'models/Lora'
        sd_embedding_path = 'embeddings'
        control_net_dir = f'{ext_path}/sd-webui-controlnet/models'
        ad_rel_model_dir = "models/adetailer"  # was a pointless f-string
        # download SD checkpoints
        print('************ Download SD models ********************')
        sd_models = all_resource["sd_models"]
        for model_file in sd_models:
            ab_download_resource(model_file, sd_path)
        # download loras
        print('************* Download Lora ******************')
        sd_lora = all_resource["sd_lora"]
        for model_file in sd_lora:
            ab_download_resource(model_file, sd_lora_path)
        # download textual-inversion embeddings
        print('************* Download Embeddings ******************')
        sd_embeddings = all_resource["sd_embeddings"]
        for embedding in sd_embeddings:
            ab_download_resource(embedding, sd_embedding_path)
        # download controlnet models (only if the extension clone succeeded,
        # hence the existence check on its models directory)
        print('************* Download controlnet models ******************')
        controlnet_models = all_resource["controlnet_models"]
        if os.path.exists(control_net_dir):
            for model_file in controlnet_models:
                ab_download_resource(model_file, control_net_dir)
        # download adetailer-related models
        print('************* Download ad-relative models ******************')
        ad_rel_models = all_resource["ad_ext_models"]
        for md_url in ad_rel_models:
            ab_download_resource(md_url, ad_rel_model_dir)
        break  # everything succeeded — leave the retry loop
    except Exception as e:
        # Any failure above (network, git, filesystem) lands here and the
        # while loop retries until the budget is exhausted.
        print(f"Exception: {e}")