mirror of https://github.com/DominikDoom/a1111-sd-webui-tagcomplete.git
@@ -29,9 +29,9 @@ class LoraParser extends BaseTagParser {
 async function load() {
     if (loras.length === 0) {
         try {
-            loras = (await readFile(`${tagBasePath}/temp/lora.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            loras = (await loadCSV(`${tagBasePath}/temp/lora.txt`))
+                .filter(x => x[0].trim().length > 0) // Remove empty lines
+                .map(x => [x[0].trim(), x[1]]); // Remove carriage returns and padding if it exists, split into name, hash pairs
         } catch (e) {
             console.error("Error loading lora.txt: " + e);
         }
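Editor's note: this hunk (and the two below) replaces hand-rolled `readFile(...).split("\n")` parsing with a shared `loadCSV` helper that already returns rows as arrays of fields. The helper's implementation is not part of this diff; the sketch below only illustrates the contract the callers rely on (one array per line, naively split on commas) and is an assumption, not the extension's actual code.

// Hypothetical sketch of the loadCSV contract assumed by the callers above.
// The real helper lives elsewhere in the extension and may handle quoting,
// carriage returns, or caching differently.
async function loadCSV(path) {
    const text = await readFile(path);      // readFile returns the file contents as a string (as seen in the old code)
    return text.split("\n")                 // one entry per line, including empty ones (callers filter those out)
        .map(line => line.split(","));      // naive comma split into field arrays
}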
@@ -29,9 +29,9 @@ class LycoParser extends BaseTagParser {
 async function load() {
     if (lycos.length === 0) {
         try {
-            lycos = (await readFile(`${tagBasePath}/temp/lyco.txt`)).split("\n")
-                .filter(x => x.trim().length > 0) // Remove empty lines
-                .map(x => x.trim().split(",")); // Remove carriage returns and padding if it exists, split into name, hash pairs
+            lycos = (await loadCSV(`${tagBasePath}/temp/lyco.txt`))
+                .filter(x => x[0].trim().length > 0) // Remove empty lines
+                .map(x => [x[0].trim(), x[1]]); // Remove carriage returns and padding if it exists, split into name, hash pairs
         } catch (e) {
             console.error("Error loading lyco.txt: " + e);
         }
@@ -5,21 +5,21 @@ async function load() {
 
     if (modelKeywordPath.length > 0 && modelKeywordDict.size === 0) {
         try {
-            let lines = [];
+            let csv_lines = [];
             // Only add default keywords if wanted by the user
             if (TAC_CFG.modelKeywordCompletion !== "Only user list")
-                lines = (await readFile(`${modelKeywordPath}/lora-keyword.txt`)).split("\n");
+                csv_lines = (await loadCSV(`${modelKeywordPath}/lora-keyword.txt`));
             // Add custom user keywords if the file exists
             if (customFileExists)
-                lines = lines.concat((await readFile(`${modelKeywordPath}/lora-keyword-user.txt`)).split("\n"));
+                csv_lines = csv_lines.concat((await loadCSV(`${modelKeywordPath}/lora-keyword-user.txt`)));
 
-            if (lines.length === 0) return;
+            if (csv_lines.length === 0) return;
 
-            lines = lines.filter(x => x.trim().length > 0 && x.trim()[0] !== "#") // Remove empty lines and comments
+            csv_lines = csv_lines.filter(x => x[0].trim().length > 0 && x[0].trim()[0] !== "#") // Remove empty lines and comments
+            console.log(csv_lines)
 
             // Add to the dict
-            lines.forEach(line => {
-                const parts = line.split(",");
+            csv_lines.forEach(parts => {
                 const hash = parts[0];
                 const keywords = parts[1].replaceAll("| ", ", ").replaceAll("|", ", ").trim();
                 const lastSepIndex = parts[2]?.lastIndexOf("/") + 1 || parts[2]?.lastIndexOf("\\") + 1 || 0;
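Editor's note: the loop shown in this hunk expects each csv_lines row in a [hash, keywords, path?] shape. The row below is a hypothetical example (not taken from the repository's actual keyword list) tracing how the fields are transformed by the committed code:

// Hypothetical input row, for illustration only:
const parts = ["a1b2c3d4", "masterpiece| best quality", "subdir\\myLora.safetensors"];

const hash = parts[0];                                   // "a1b2c3d4"
const keywords = parts[1].replaceAll("| ", ", ")
                         .replaceAll("|", ", ").trim();  // "masterpiece, best quality"
const lastSepIndex = parts[2]?.lastIndexOf("/") + 1
    || parts[2]?.lastIndexOf("\\") + 1 || 0;             // index just past the last path separator (7 here)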