Mirror of https://github.com/DominikDoom/a1111-sd-webui-tagcomplete.git
Compare commits
9 Commits

| SHA1 |
|---|
| b28497764f |
| 0d9d5f1e44 |
| de3380818e |
| acb85d7bb1 |
| 39ea33be9f |
| 1cac893e63 |
| 94823b871c |
| 599ff8a6f2 |
| 6893113e0b |
```diff
@@ -136,7 +136,7 @@ To add custom mappings for unknown Loras, you can use the UI provided by model-k
 The only issue is that it has no official support for the Lycoris extension and doesn't scan its folder for files, so to add them through the UI you will have to temporarily move them into the Lora model folder to be able to select them in model-keywords dropdown.
 Some are already included in the default list though, so trying it out first is advisable.
 <details>
-<summary>Walkthorugh to add custom keywords</summary>
+<summary>Walkthrough to add custom keywords</summary>
 
 ![image](https://user-images.githubusercontent.com/34448969/233788689-de651ff5-ad8d-4f43-9e65-993e42cbe2d4.png)
 </details>
```
```diff
@@ -29,9 +29,9 @@ class LoraParser extends BaseTagParser {
 async function load() {
     if (loras.length === 0) {
         try {
-            loras = (await readFile(`${tagBasePath}/temp/lora.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            loras = (await loadCSV(`${tagBasePath}/temp/lora.txt`))
+                .filter(x => x[0]?.trim().length > 0) // Remove empty lines
+                .map(x => [x[0]?.trim(), x[1]]); // Trim filenames and return the name, hash pairs
         } catch (e) {
             console.error("Error loading lora.txt: " + e);
         }
```
```diff
@@ -29,9 +29,9 @@ class LycoParser extends BaseTagParser {
 async function load() {
     if (lycos.length === 0) {
         try {
-            lycos = (await readFile(`${tagBasePath}/temp/lyco.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            lycos = (await loadCSV(`${tagBasePath}/temp/lyco.txt`))
+                .filter(x => x[0]?.trim().length > 0) // Remove empty lines
+                .map(x => [x[0]?.trim(), x[1]]); // Trim filenames and return the name, hash pairs
         } catch (e) {
             console.error("Error loading lyco.txt: " + e);
         }
```
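Both parser hunks above swap a naive `split(",")` for the shared `loadCSV` helper. A minimal Python sketch (filename and hash are made up) of the failure mode this fixes: a model name containing a comma corrupts the name/hash split, while quoted CSV survives the round trip.

```python
import csv
import io

# A hypothetical lora temp-file line whose model name contains a comma
line = '"style, watercolor v2",a1b2c3d4'

print(line.split(","))
# ['"style', ' watercolor v2"', 'a1b2c3d4']  -- naive split tears the name apart

print(next(csv.reader(io.StringIO(line), skipinitialspace=True)))
# ['style, watercolor v2', 'a1b2c3d4']  -- CSV parsing keeps the quoted name whole
```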
```diff
@@ -5,21 +5,20 @@ async function load() {
 
     if (modelKeywordPath.length > 0 && modelKeywordDict.size === 0) {
         try {
-            let lines = [];
+            let csv_lines = [];
             // Only add default keywords if wanted by the user
             if (TAC_CFG.modelKeywordCompletion !== "Only user list")
-                lines = (await readFile(`${modelKeywordPath}/lora-keyword.txt`)).split("\n");
+                csv_lines = (await loadCSV(`${modelKeywordPath}/lora-keyword.txt`));
             // Add custom user keywords if the file exists
             if (customFileExists)
-                lines = lines.concat((await readFile(`${modelKeywordPath}/lora-keyword-user.txt`)).split("\n"));
+                csv_lines = csv_lines.concat((await loadCSV(`${modelKeywordPath}/lora-keyword-user.txt`)));
 
-            if (lines.length === 0) return;
+            if (csv_lines.length === 0) return;
 
-            lines = lines.filter(x => x.trim().length > 0 && x.trim()[0] !== "#") // Remove empty lines and comments
+            csv_lines = csv_lines.filter(x => x[0].trim().length > 0 && x[0].trim()[0] !== "#") // Remove empty lines and comments
 
             // Add to the dict
-            lines.forEach(line => {
-                const parts = line.split(",");
+            csv_lines.forEach(parts => {
                 const hash = parts[0];
                 const keywords = parts[1].replaceAll("| ", ", ").replaceAll("|", ", ").trim();
                 const lastSepIndex = parts[2]?.lastIndexOf("/") + 1 || parts[2]?.lastIndexOf("\\") + 1 || 0;
```
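For reference, a Python sketch (row values are hypothetical, and the separator handling slightly simplifies the JavaScript above) of how each keyword row is interpreted after the change: column 0 is the model hash, column 1 the keywords joined by `|`, column 2 an optional file path from which only the filename is kept.

```python
# Hypothetical lora-keyword row: [hash, keywords joined by "|", optional path]
parts = ["a1b2c3d4", "keyword one| keyword two", "subfolder/MyLora.safetensors"]

model_hash = parts[0]
# Normalize "|"-separated keywords into a comma-separated list
keywords = parts[1].replace("| ", ", ").replace("|", ", ").strip()
# Keep only the filename, whichever path separator is used
last_sep = max(parts[2].rfind("/"), parts[2].rfind("\\")) + 1
filename = parts[2][last_sep:]

print(model_hash, "->", keywords, "for", filename)
# a1b2c3d4 -> keyword one, keyword two for MyLora.safetensors
```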
```diff
@@ -449,12 +449,18 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
     if (result.hash && result.hash !== "NOFILE" && result.hash.length > 0) {
         let keywords = null;
         let nameDict = modelKeywordDict.get(result.hash);
-        let name = result.text + ".safetensors";
+        let names = [result.text + ".safetensors", result.text + ".pt", result.text + ".ckpt"];
 
         if (nameDict) {
-            if (nameDict.has(name))
-                keywords = nameDict.get(name);
-            else
+            let found = false;
+            names.forEach(name => {
+                if (!found && nameDict.has(name)) {
+                    found = true;
+                    keywords = nameDict.get(name);
+                }
+            });
+
+            if (!found)
                 keywords = nameDict.get("none");
         }
 
```
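The lookup now tries each known model extension before falling back to the `"none"` entry. A compact Python sketch of the same first-match logic (dictionary contents made up):

```python
# Hypothetical keyword dictionary for one model hash
name_dict = {"MyLora.safetensors": "keyword a", "none": "fallback keyword"}
# Candidate filenames, one per supported model extension
candidates = ["MyLora.safetensors", "MyLora.pt", "MyLora.ckpt"]

# First match wins; "none" is the catch-all entry
keywords = next((name_dict[n] for n in candidates if n in name_dict),
                name_dict.get("none"))
print(keywords)  # keyword a
```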
```diff
@@ -1,5 +1,6 @@
 # This file provides support for the model-keyword extension to add known lora keywords on completion
 
+import csv
 import hashlib
 from pathlib import Path
 
```
```diff
@@ -15,18 +16,21 @@ hash_dict = {}
 
 
 def load_hash_cache():
-    with open(known_hashes_file, "r") as file:
-        for line in file:
-            name, hash, mtime = line.replace("\n", "").split(",")
+    with open(known_hashes_file, "r", encoding="utf-8") as file:
+        reader = csv.reader(
+            file.readlines(), delimiter=",", quotechar='"', skipinitialspace=True
+        )
+        for line in reader:
+            name, hash, mtime = line
             hash_dict[name] = (hash, mtime)
 
 
 def update_hash_cache():
     global file_needs_update
     if file_needs_update:
-        with open(known_hashes_file, "w") as file:
+        with open(known_hashes_file, "w", encoding="utf-8") as file:
             for name, (hash, mtime) in hash_dict.items():
-                file.write(f"{name},{hash},{mtime}\n")
+                file.write(f'"{name}",{hash},{mtime}\n')
 
 
 # Copy of the fast inaccurate hash function from the extension
```
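Since `csv.reader` treats an unquoted field the same as a quoted one, the new reader stays compatible with hash caches written in the old format. A small sketch (names, hashes, and mtimes made up):

```python
import csv

# One line in the old unquoted cache format, one in the new quoted format
lines = [
    "MyLora.safetensors,a1b2c3d4,1700000000",
    '"style, watercolor v2.safetensors",e5f6a7b8,1700000001',
]

for name, model_hash, mtime in csv.reader(lines, quotechar='"', skipinitialspace=True):
    print(name, model_hash, mtime)
# MyLora.safetensors a1b2c3d4 1700000000
# style, watercolor v2.safetensors e5f6a7b8 1700000001
```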
```diff
@@ -13,13 +13,13 @@ except ImportError:
 # Webui root path
 FILE_DIR = Path().absolute()
 # The extension base path
-EXT_PATH = FILE_DIR.joinpath('extensions')
+EXT_PATH = FILE_DIR.joinpath("extensions")
 
 # Tags base path
-TAGS_PATH = Path(scripts.basedir()).joinpath('tags')
+TAGS_PATH = Path(scripts.basedir()).joinpath("tags")
 
 # The path to the folder containing the wildcards and embeddings
-WILDCARD_PATH = FILE_DIR.joinpath('scripts/wildcards')
+WILDCARD_PATH = FILE_DIR.joinpath("scripts/wildcards")
 EMB_PATH = Path(shared.cmd_opts.embeddings_dir)
 HYP_PATH = Path(shared.cmd_opts.hypernetwork_dir)
 
```
```diff
@@ -27,15 +27,16 @@ try:
     LORA_PATH = Path(shared.cmd_opts.lora_dir)
 except AttributeError:
     LORA_PATH = None
 
+
 try:
     LYCO_PATH = Path(shared.cmd_opts.lyco_dir)
 except AttributeError:
     LYCO_PATH = None
 
 
 def find_ext_wildcard_paths():
     """Returns the path to the extension wildcards folder"""
-    found = list(EXT_PATH.glob('*/wildcards/'))
+    found = list(EXT_PATH.glob("*/wildcards/"))
     return found
```
```diff
@@ -43,5 +44,12 @@ def find_ext_wildcard_paths():
 WILDCARD_EXT_PATHS = find_ext_wildcard_paths()
 
 # The path to the temporary files
-STATIC_TEMP_PATH = FILE_DIR.joinpath('tmp') # In the webui root, on windows it exists by default, on linux it doesn't
-TEMP_PATH = TAGS_PATH.joinpath('temp') # Extension specific temp files
+# In the webui root, on windows it exists by default, on linux it doesn't
+STATIC_TEMP_PATH = FILE_DIR.joinpath("tmp")
+TEMP_PATH = TAGS_PATH.joinpath("temp")  # Extension specific temp files
+
+# Make sure these folders exist
+if not TEMP_PATH.exists():
+    TEMP_PATH.mkdir()
+if not STATIC_TEMP_PATH.exists():
+    STATIC_TEMP_PATH.mkdir()
```
```diff
@@ -151,7 +151,7 @@ def get_lora():
     # Sort
     sorted_loras = dict(sorted(hashes.items()))
     # Add hashes and return
-    return [f"{name},{hash}" for name, hash in sorted_loras.items()]
+    return [f"\"{name}\",{hash}" for name, hash in sorted_loras.items()]
 
 
 def get_lyco():
```
```diff
@@ -170,7 +170,7 @@ def get_lyco():
     # Sort
     sorted_lycos = dict(sorted(hashes.items()))
     # Add hashes and return
-    return [f"{name},{hash}" for name, hash in sorted_lycos.items()]
+    return [f"\"{name}\",{hash}" for name, hash in sorted_lycos.items()]
 
 
 def write_tag_base_path():
```
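Both `get_lora()` and `get_lyco()` now quote the name field when writing the temp files, which is what keeps comma-bearing filenames intact for the `loadCSV` consumers above. A one-line sketch of the emitted format (values made up):

```python
name, model_hash = "style, watercolor v2", "a1b2c3d4"  # hypothetical entry
print(f'"{name}",{model_hash}')
# "style, watercolor v2",a1b2c3d4  (name quoted, so its comma is not a field separator)
```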
```diff
@@ -263,15 +263,19 @@ def write_temp_files():
     if model_keyword_installed:
         load_hash_cache()
 
-    if LORA_PATH is not None and LORA_PATH.exists():
+    lora_exists = LORA_PATH is not None and LORA_PATH.exists()
+    if lora_exists:
         lora = get_lora()
         if lora:
             write_to_temp_file('lora.txt', lora)
 
-    if LYCO_PATH is not None and LYCO_PATH.exists():
+    lyco_exists = LYCO_PATH is not None and LYCO_PATH.exists()
+    if lyco_exists and not (lora_exists and LYCO_PATH.samefile(LORA_PATH)):
         lyco = get_lyco()
         if lyco:
             write_to_temp_file('lyco.txt', lyco)
+    elif lyco_exists and lora_exists and LYCO_PATH.samefile(LORA_PATH):
+        print("tag_autocomplete_helper: LyCORIS path is the same as LORA path, skipping")
 
     if model_keyword_installed:
         update_hash_cache()
```
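`Path.samefile()` compares the underlying filesystem entry, so it catches two different spellings of one directory; it also raises if either path is missing, which is why the hunk checks `lora_exists`/`lyco_exists` first. A self-contained sketch (stand-in paths):

```python
from pathlib import Path

# Stand-ins for the Lora and LyCORIS model folders, spelled differently
lora_path = Path.cwd()
lyco_path = Path(".").resolve()

# Mirrors the guard in the diff: only call samefile() on existing paths
if lora_path.exists() and lyco_path.exists() and lyco_path.samefile(lora_path):
    print("tag_autocomplete_helper: LyCORIS path is the same as LORA path, skipping")
```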