Merge branch 'main' into feature-sort-by-frequent-use

DominikDoom
2023-11-05 13:30:51 +01:00
2 changed files with 23 additions and 17 deletions


@@ -77,7 +77,8 @@ def get_wildcards():
     wildcard_files = list(WILDCARD_PATH.rglob("*.txt"))
     resolved = [(w, w.relative_to(WILDCARD_PATH).as_posix())
                 for w in wildcard_files
-                if w.name != "put wildcards here.txt"]
+                if w.name != "put wildcards here.txt"
+                and w.is_file()]
     return sort_models(resolved, name_has_subpath=True)
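Note: Path.rglob("*.txt") matches any path whose name fits the pattern, directories included, so a folder that happened to be named like a wildcard file would otherwise end up in the completion list. The added is_file() guard filters those out. A minimal sketch of the failure mode, using a hypothetical layout:

    from pathlib import Path

    root = Path("wildcards")
    root.joinpath("misc.txt").mkdir(parents=True, exist_ok=True)  # a directory, not a file
    root.joinpath("colors.txt").write_text("red\nblue\n")

    hits = list(root.rglob("*.txt"))          # both misc.txt (dir) and colors.txt (file)
    files = [p for p in hits if p.is_file()]  # only colors.txt survives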
@@ -89,7 +90,8 @@ def get_ext_wildcards():
         wildcard_files.append(path.as_posix())
         resolved = [(w, w.relative_to(path).as_posix())
                     for w in path.rglob("*.txt")
-                    if w.name != "put wildcards here.txt"]
+                    if w.name != "put wildcards here.txt"
+                    and w.is_file()]
         wildcard_files.extend(sort_models(resolved, name_has_subpath=True))
         wildcard_files.append("-----")
@@ -130,8 +132,8 @@ def get_yaml_wildcards():
     """Returns a list of all tags found in extension YAML files found under a Tags: key."""
     yaml_files = []
     for path in WILDCARD_EXT_PATHS:
-        yaml_files.extend(p for p in path.rglob("*.yml"))
-        yaml_files.extend(p for p in path.rglob("*.yaml"))
+        yaml_files.extend(p for p in path.rglob("*.yml") if p.is_file())
+        yaml_files.extend(p for p in path.rglob("*.yaml") if p.is_file())
 
     yaml_wildcards = {}
@@ -198,7 +200,7 @@ def get_embeddings(sd_model):
     except AttributeError:
         print("tag_autocomplete_helper: Old webui version or unrecognized model shape, using fallback for embedding completion.")
         # Get a list of all embeddings in the folder
-        all_embeds = [str(e.relative_to(EMB_PATH)) for e in EMB_PATH.rglob("*") if e.suffix in {".bin", ".pt", ".png",'.webp', '.jxl', '.avif'}]
+        all_embeds = [str(e.relative_to(EMB_PATH)) for e in EMB_PATH.rglob("*") if e.suffix in {".bin", ".pt", ".png",'.webp', '.jxl', '.avif'} and e.is_file()]
         # Remove files with a size of 0
         all_embeds = [e for e in all_embeds if EMB_PATH.joinpath(e).stat().st_size > 0]
         # Remove file extensions
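Note: the fallback branch combines three filters: an extension allow-list, the new is_file() guard, and a follow-up pass that drops zero-byte placeholder files. A condensed sketch of the same chain (EMB_PATH location assumed; the suffix set is taken from the hunk above):

    from pathlib import Path

    EMB_PATH = Path("embeddings")
    EMB_EXTS = {".bin", ".pt", ".png", ".webp", ".jxl", ".avif"}

    # Regular files with a known suffix and a non-zero size, as one pass
    all_embeds = [
        e for e in EMB_PATH.rglob("*")
        if e.suffix in EMB_EXTS and e.is_file() and e.stat().st_size > 0
    ]
    names = [e.relative_to(EMB_PATH).with_suffix("").as_posix() for e in all_embeds]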
@@ -212,7 +214,7 @@ def get_hypernetworks():
     # Get a list of all hypernetworks in the folder
     hyp_paths = [Path(h) for h in glob.glob(HYP_PATH.joinpath("**/*").as_posix(), recursive=True)]
-    all_hypernetworks = [(h, h.stem) for h in hyp_paths if h.suffix in {".pt"}]
+    all_hypernetworks = [(h, h.stem) for h in hyp_paths if h.suffix in {".pt"} and h.is_file()]
     return sort_models(all_hypernetworks)
 
 model_keyword_installed = write_model_keyword_path()
@@ -223,7 +225,7 @@ def get_lora():
     # Get a list of all lora in the folder
     lora_paths = [Path(l) for l in glob.glob(LORA_PATH.joinpath("**/*").as_posix(), recursive=True)]
     # Get hashes
-    valid_loras = [lf for lf in lora_paths if lf.suffix in {".safetensors", ".ckpt", ".pt"}]
+    valid_loras = [lf for lf in lora_paths if lf.suffix in {".safetensors", ".ckpt", ".pt"} and lf.is_file()]
     loras_with_hash = []
     for l in valid_loras:
         name = l.relative_to(LORA_PATH).as_posix()
@@ -243,7 +245,7 @@ def get_lyco():
     lyco_paths = [Path(ly) for ly in glob.glob(LYCO_PATH.joinpath("**/*").as_posix(), recursive=True)]
     # Get hashes
-    valid_lycos = [lyf for lyf in lyco_paths if lyf.suffix in {".safetensors", ".ckpt", ".pt"}]
+    valid_lycos = [lyf for lyf in lyco_paths if lyf.suffix in {".safetensors", ".ckpt", ".pt"} and lyf.is_file()]
     lycos_with_hash = []
     for ly in valid_lycos:
         name = ly.relative_to(LYCO_PATH).as_posix()
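Note: the hypernetwork, lora, and lyco scans all use glob.glob with "**/*" and recursive=True, which, like rglob, yields directories as well as files, so the suffix check alone is not enough if a directory name happens to end in ".pt" or ".safetensors". The shared pattern, sketched with a hypothetical models folder:

    import glob
    from pathlib import Path

    MODEL_EXTS = {".safetensors", ".ckpt", ".pt"}
    paths = [Path(p) for p in glob.glob("models/**/*", recursive=True)]
    valid = [p for p in paths if p.suffix in MODEL_EXTS and p.is_file()]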
@@ -269,19 +271,19 @@ def write_to_temp_file(name, data):
 csv_files = []
 csv_files_withnone = []
 
-def update_tag_files():
+def update_tag_files(*args, **kwargs):
     """Returns a list of all potential tag files"""
     global csv_files, csv_files_withnone
-    files = [str(t.relative_to(TAGS_PATH)) for t in TAGS_PATH.glob("*.csv")]
+    files = [str(t.relative_to(TAGS_PATH)) for t in TAGS_PATH.glob("*.csv") if t.is_file()]
     csv_files = files
     csv_files_withnone = ["None"] + files
 
 json_files = []
 json_files_withnone = []
 
-def update_json_files():
+def update_json_files(*args, **kwargs):
     """Returns a list of all potential json files"""
     global json_files, json_files_withnone
-    files = [str(j.relative_to(TAGS_PATH)) for j in TAGS_PATH.glob("*.json")]
+    files = [str(j.relative_to(TAGS_PATH)) for j in TAGS_PATH.glob("*.json") if j.is_file()]
     json_files = files
     json_files_withnone = ["None"] + files
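Note: widening update_tag_files and update_json_files to (*args, **kwargs) lets the same functions be called directly and also be registered as event callbacks that receive arguments they don't use (presumably UI refresh handlers, going by refresh_temp_files below, which gets the same treatment). A minimal sketch of why the widened signature helps:

    def refresh(*args, **kwargs):  # hypothetical stand-in for the handlers above
        print("refreshed")

    refresh()              # plain call, no arguments
    refresh("button", 42)  # callback-style call; the extras are ignored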
@@ -317,7 +319,7 @@ if EMB_PATH.exists():
     # Get embeddings after the model loaded callback
     script_callbacks.on_model_loaded(get_embeddings)
 
-def refresh_temp_files():
+def refresh_temp_files(*args, **kwargs):
     global WILDCARD_EXT_PATHS
     WILDCARD_EXT_PATHS = find_ext_wildcard_paths()
     load_textual_inversion_embeddings(force_reload = True) # Instant embedding reload.
@@ -507,7 +509,7 @@ def api_tac(_: gr.Blocks, app: FastAPI):
         try:
             json_candidates = glob.glob(base_path.as_posix() + f"/**/{filename}.json", recursive=True)
-            if json_candidates is not None and len(json_candidates) > 0:
+            if json_candidates is not None and len(json_candidates) > 0 and Path(json_candidates[0]).is_file():
                 return FileResponse(json_candidates[0])
         except Exception as e:
             return JSONResponse({"error": e}, status_code=500)
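Note: glob.glob always returns a list (possibly empty), never None, so the "is not None" half of the condition is redundant; the meaningful hardening here is the Path(...).is_file() guard, which keeps a directory named like <filename>.json from being handed to FileResponse. The check, reduced to a sketch (function name hypothetical):

    import glob
    from pathlib import Path

    def first_real_file(pattern: str):
        candidates = glob.glob(pattern, recursive=True)
        if candidates and Path(candidates[0]).is_file():  # truthiness covers the empty case
            return candidates[0]
        return None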
@@ -518,7 +520,7 @@ def api_tac(_: gr.Blocks, app: FastAPI):
         try:
             img_glob = glob.glob(base_path.as_posix() + f"/**/{filename}.*", recursive=True)
-            img_candidates = [img for img in img_glob if Path(img).suffix in [".png", ".jpg", ".jpeg", ".webp", ".gif"]]
+            img_candidates = [img for img in img_glob if Path(img).suffix in [".png", ".jpg", ".jpeg", ".webp", ".gif"] and Path(img).is_file()]
             if img_candidates is not None and len(img_candidates) > 0:
                 if blob:
                     return FileResponse(img_candidates[0])
@@ -542,7 +544,7 @@ def api_tac(_: gr.Blocks, app: FastAPI):
     @app.get("/tacapi/v1/lora-cached-hash/{lora_name}")
     async def get_lora_cached_hash(lora_name: str):
         path_glob = glob.glob(LORA_PATH.as_posix() + f"/**/{lora_name}.*", recursive=True)
-        paths = [lora for lora in path_glob if Path(lora).suffix in [".safetensors", ".ckpt", ".pt"]]
+        paths = [lora for lora in path_glob if Path(lora).suffix in [".safetensors", ".ckpt", ".pt"] and Path(lora).is_file()]
         if paths is not None and len(paths) > 0:
             path = paths[0]
             hash = hashes.sha256_from_cache(path, f"lora/{lora_name}", path.endswith(".safetensors"))
@@ -582,7 +584,7 @@ def api_tac(_: gr.Blocks, app: FastAPI):
         try:
             wildcard_path = base.joinpath(filename)
-            if wildcard_path.exists():
+            if wildcard_path.exists() and wildcard_path.is_file():
                 return FileResponse(wildcard_path)
             else:
                 return JSONResponse({}, status_code=404)
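Note: Path.exists() is also True for directories (and for symlinks resolving to them), so the old check could attempt to serve a directory as a file; adding is_file() makes such requests fall through to the 404 branch instead. Illustrated briefly:

    from pathlib import Path

    p = Path("some_dir")   # hypothetical path
    p.mkdir(exist_ok=True)
    print(p.exists())      # True: a directory also "exists"
    print(p.is_file())     # False: so the endpoint now returns 404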