Compare commits


6 Commits
2.7.1 ... 2.7.5

Author       SHA1        Message  Date
DominikDoom  b28497764f  Check keywords for .pt and .ckpt loras too  2023-07-23 11:27:02 +02:00
                         Especially for custom keywords, the preset list mostly uses safetensors
DominikDoom  0d9d5f1e44  Safety check & remove log  2023-07-23 11:08:29 +02:00
DominikDoom  de3380818e  Quote lora filenames to handle commas in filenames  2023-07-23 11:05:44 +02:00
                         Fixes #206
DominikDoom  acb85d7bb1  Make sure both temp folders exist  2023-07-23 09:01:26 +02:00
DominikDoom  39ea33be9f  Fix encoding for load too  2023-07-22 21:15:44 +02:00
                         Fixes #204
DominikDoom  1cac893e63  Create temp folder first before touching if it doesn't exist  2023-07-22 20:46:10 +02:00
                         Fixes #203
7 changed files with 48 additions and 31 deletions

View File

@@ -29,9 +29,9 @@ class LoraParser extends BaseTagParser {
 async function load() {
     if (loras.length === 0) {
         try {
-            loras = (await readFile(`${tagBasePath}/temp/lora.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            loras = (await loadCSV(`${tagBasePath}/temp/lora.txt`))
+                .filter(x => x[0]?.trim().length > 0) // Remove empty lines
+                .map(x => [x[0]?.trim(), x[1]]); // Trim filenames and return the name, hash pairs
         } catch (e) {
             console.error("Error loading lora.txt: " + e);
         }

View File

@@ -29,9 +29,9 @@ class LycoParser extends BaseTagParser {
 async function load() {
     if (lycos.length === 0) {
         try {
-            lycos = (await readFile(`${tagBasePath}/temp/lyco.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            lycos = (await loadCSV(`${tagBasePath}/temp/lyco.txt`))
+                .filter(x => x[0]?.trim().length > 0) // Remove empty lines
+                .map(x => [x[0]?.trim(), x[1]]); // Trim filenames and return the name, hash pairs
         } catch (e) {
             console.error("Error loading lyco.txt: " + e);
         }

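Both loaders above now delegate parsing to a loadCSV helper (defined elsewhere in the extension, not part of this diff), so a quoted filename containing a comma arrives as a single field instead of being split apart. A minimal Python sketch of the equivalent filter/map step, using made-up rows, for illustration only:

# Illustration only, not part of the diff; example rows as a quote-aware CSV parse would return them.
rows = [
    ["style, soft light.safetensors", "a1b2c3d4"],
    ["plain_lora.safetensors", "e5f6a7b8"],
    [""],  # an empty line in the source file
]

# Mirror of the .filter()/.map() chain above: drop empty rows,
# trim the filename, and keep the [name, hash] pair.
loras = [[row[0].strip(), row[1]] for row in rows if row[0].strip()]

print(loras)
# [['style, soft light.safetensors', 'a1b2c3d4'], ['plain_lora.safetensors', 'e5f6a7b8']]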
View File

@@ -5,21 +5,20 @@ async function load() {
     if (modelKeywordPath.length > 0 && modelKeywordDict.size === 0) {
         try {
-            let lines = [];
+            let csv_lines = [];
             // Only add default keywords if wanted by the user
             if (TAC_CFG.modelKeywordCompletion !== "Only user list")
-                lines = (await readFile(`${modelKeywordPath}/lora-keyword.txt`)).split("\n");
+                csv_lines = (await loadCSV(`${modelKeywordPath}/lora-keyword.txt`));
             // Add custom user keywords if the file exists
             if (customFileExists)
-                lines = lines.concat((await readFile(`${modelKeywordPath}/lora-keyword-user.txt`)).split("\n"));
+                csv_lines = csv_lines.concat((await loadCSV(`${modelKeywordPath}/lora-keyword-user.txt`)));
-            if (lines.length === 0) return;
+            if (csv_lines.length === 0) return;
-            lines = lines.filter(x => x.trim().length > 0 && x.trim()[0] !== "#") // Remove empty lines and comments
+            csv_lines = csv_lines.filter(x => x[0].trim().length > 0 && x[0].trim()[0] !== "#") // Remove empty lines and comments
             // Add to the dict
-            lines.forEach(line => {
-                const parts = line.split(",");
+            csv_lines.forEach(parts => {
                 const hash = parts[0];
                 const keywords = parts[1].replaceAll("| ", ", ").replaceAll("|", ", ").trim();
                 const lastSepIndex = parts[2]?.lastIndexOf("/") + 1 || parts[2]?.lastIndexOf("\\") + 1 || 0;

View File

@@ -449,12 +449,18 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
     if (result.hash && result.hash !== "NOFILE" && result.hash.length > 0) {
         let keywords = null;
         let nameDict = modelKeywordDict.get(result.hash);
-        let name = result.text + ".safetensors";
+        let names = [result.text + ".safetensors", result.text + ".pt", result.text + ".ckpt"];
         if (nameDict) {
-            if (nameDict.has(name))
-                keywords = nameDict.get(name);
-            else
+            let found = false;
+            names.forEach(name => {
+                if (!found && nameDict.has(name)) {
+                    found = true;
+                    keywords = nameDict.get(name);
+                }
+            });
+            if (!found)
                 keywords = nameDict.get("none");
         }

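The lookup above now tries each supported model file extension before falling back to the generic "none" entry, so keywords stored for .pt or .ckpt loras are found as well. A small Python sketch of the same idea, with a hypothetical hash and keyword data, for illustration only:

# Illustration only, not part of the diff; the dictionary contents are made up.
model_keyword_dict = {
    "a1b2c3d4": {"my_lora.pt": "trigger word", "none": "generic keyword"},
}

def find_keywords(model_hash, base_name):
    # Try the .safetensors, .pt and .ckpt variants, then the "none" fallback.
    name_dict = model_keyword_dict.get(model_hash)
    if name_dict is None:
        return None
    for ext in (".safetensors", ".pt", ".ckpt"):
        candidate = base_name + ext
        if candidate in name_dict:
            return name_dict[candidate]
    return name_dict.get("none")

print(find_keywords("a1b2c3d4", "my_lora"))  # -> trigger word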
View File

@@ -1,5 +1,6 @@
 # This file provides support for the model-keyword extension to add known lora keywords on completion
+import csv
 import hashlib
 from pathlib import Path
@@ -15,9 +16,12 @@ hash_dict = {}
 def load_hash_cache():
-    with open(known_hashes_file, "r") as file:
-        for line in file:
-            name, hash, mtime = line.replace("\n", "").split(",")
+    with open(known_hashes_file, "r", encoding="utf-8") as file:
+        reader = csv.reader(
+            file.readlines(), delimiter=",", quotechar='"', skipinitialspace=True
+        )
+        for line in reader:
+            name, hash, mtime = line
             hash_dict[name] = (hash, mtime)
@@ -26,7 +30,7 @@ def update_hash_cache():
     if file_needs_update:
         with open(known_hashes_file, "w", encoding="utf-8") as file:
             for name, (hash, mtime) in hash_dict.items():
-                file.write(f"{name},{hash},{mtime}\n")
+                file.write(f'"{name}",{hash},{mtime}\n')
 # Copy of the fast inaccurate hash function from the extension

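The cache above only holds together if the quoted name written by update_hash_cache() survives the read in load_hash_cache(). A standalone round-trip sketch with the same quoting and csv.reader settings, using an in-memory buffer and made-up values, for illustration only:

# Illustration only, not part of the diff.
import csv
import io

# Made-up entry: a lora filename that itself contains a comma.
name, hash_, mtime = "style, soft light.safetensors", "a1b2c3d4", "1690102044"

buffer = io.StringIO()
buffer.write(f'"{name}",{hash_},{mtime}\n')  # write side: quote only the name

buffer.seek(0)
reader = csv.reader(buffer, delimiter=",", quotechar='"', skipinitialspace=True)
for row in reader:
    parsed_name, parsed_hash, parsed_mtime = row
    assert parsed_name == name  # the comma did not split the name into two fields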
View File

@@ -13,13 +13,13 @@ except ImportError:
 # Webui root path
 FILE_DIR = Path().absolute()
 # The extension base path
-EXT_PATH = FILE_DIR.joinpath('extensions')
+EXT_PATH = FILE_DIR.joinpath("extensions")
 # Tags base path
-TAGS_PATH = Path(scripts.basedir()).joinpath('tags')
+TAGS_PATH = Path(scripts.basedir()).joinpath("tags")
 # The path to the folder containing the wildcards and embeddings
-WILDCARD_PATH = FILE_DIR.joinpath('scripts/wildcards')
+WILDCARD_PATH = FILE_DIR.joinpath("scripts/wildcards")
 EMB_PATH = Path(shared.cmd_opts.embeddings_dir)
 HYP_PATH = Path(shared.cmd_opts.hypernetwork_dir)
@@ -27,15 +27,16 @@ try:
     LORA_PATH = Path(shared.cmd_opts.lora_dir)
 except AttributeError:
     LORA_PATH = None
 try:
     LYCO_PATH = Path(shared.cmd_opts.lyco_dir)
 except AttributeError:
     LYCO_PATH = None
 def find_ext_wildcard_paths():
     """Returns the path to the extension wildcards folder"""
-    found = list(EXT_PATH.glob('*/wildcards/'))
+    found = list(EXT_PATH.glob("*/wildcards/"))
     return found
@@ -43,5 +44,12 @@ def find_ext_wildcard_paths():
 WILDCARD_EXT_PATHS = find_ext_wildcard_paths()
 # The path to the temporary files
-STATIC_TEMP_PATH = FILE_DIR.joinpath('tmp') # In the webui root, on windows it exists by default, on linux it doesn't
-TEMP_PATH = TAGS_PATH.joinpath('temp') # Extension specific temp files
+# In the webui root, on windows it exists by default, on linux it doesn't
+STATIC_TEMP_PATH = FILE_DIR.joinpath("tmp")
+TEMP_PATH = TAGS_PATH.joinpath("temp") # Extension specific temp files
+# Make sure these folders exist
+if not TEMP_PATH.exists():
+    TEMP_PATH.mkdir()
+if not STATIC_TEMP_PATH.exists():
+    STATIC_TEMP_PATH.mkdir()

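The exists()/mkdir() pairs above create the temp folders before anything tries to write into them (#203). For comparison, pathlib can do the same check-and-create in one call; this is a hedged alternative sketch with placeholder paths, not what the extension ships:

# Illustration only, not part of the diff; the paths are placeholders.
from pathlib import Path

static_temp_path = Path("tmp")
temp_path = Path("tags") / "temp"

# exist_ok=True makes the call a no-op when the folder is already there,
# and parents=True also creates missing parent folders.
for folder in (static_temp_path, temp_path):
    folder.mkdir(parents=True, exist_ok=True)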
View File

@@ -151,7 +151,7 @@ def get_lora():
     # Sort
     sorted_loras = dict(sorted(hashes.items()))
     # Add hashes and return
-    return [f"{name},{hash}" for name, hash in sorted_loras.items()]
+    return [f"\"{name}\",{hash}" for name, hash in sorted_loras.items()]
 def get_lyco():
@@ -170,7 +170,7 @@ def get_lyco():
     # Sort
     sorted_lycos = dict(sorted(hashes.items()))
     # Add hashes and return
-    return [f"{name},{hash}" for name, hash in sorted_lycos.items()]
+    return [f"\"{name}\",{hash}" for name, hash in sorted_lycos.items()]
 def write_tag_base_path():
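The manual quoting in get_lora() and get_lyco() pairs with the quote-aware readers on both the JavaScript and Python side, but a name that itself contains a double quote would still break it. A hedged sketch of how the csv module could emit the same name,hash lines with escaping handled automatically; this is an alternative shown for illustration, not part of the diff:

# Illustration only, not part of the diff; example names are made up.
import csv
import io

sorted_loras = {
    "style, soft light.safetensors": "a1b2c3d4",
    'odd "name".safetensors': "e5f6a7b8",
}

buffer = io.StringIO()
writer = csv.writer(buffer, quoting=csv.QUOTE_ALL, lineterminator="\n")
for name, lora_hash in sorted_loras.items():
    writer.writerow([name, lora_hash])

print(buffer.getvalue())
# "style, soft light.safetensors","a1b2c3d4"
# "odd ""name"".safetensors","e5f6a7b8"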