Add testing for ComfyUI examples (#95)

* Add testing for ComfyUI examples

* Remove examples, add test to github action

* Create dir

* Update readme
This commit is contained in:
pythongosssss
2024-07-07 15:38:28 +01:00
committed by GitHub
parent 7dae2eb6ba
commit 32d81c13ad
11 changed files with 414 additions and 150 deletions

View File

@@ -11,3 +11,6 @@ DEV_SERVER_COMFYUI_URL=http://127.0.0.1:8188
# Add `--front-end-root {DEPLOY_COMFY_UI_DIR}/custom_web_versions/main/dev`
# to ComfyUI launch script to serve the custom web version.
DEPLOY_COMFYUI_DIR=/home/ComfyUI/web
# The directory containing the ComfyUI_examples repo used to extract test workflows.
EXAMPLE_REPO_PATH=tests-ui/ComfyUI_examples

View File

@@ -25,6 +25,12 @@ jobs:
id: commit-message
run: echo "::set-output name=message::$(git log -1 --pretty=%B)"
working-directory: ComfyUI_frontend
- name: Checkout ComfyUI_examples
uses: actions/checkout@v4
with:
repository: "comfyanonymous/ComfyUI_examples"
path: "ComfyUI_frontend/tests-ui/ComfyUI_examples"
ref: master
- name: Skip CI
if: contains(steps.commit-message.outputs.message, '[skip ci]')
run: echo "Skipping CI as commit contains '[skip ci]'"
@@ -58,6 +64,7 @@ jobs:
- name: Run UI tests
run: |
npm run test:generate
npm run test:generate:examples
npm test -- --verbose
working-directory: ComfyUI_frontend
- name: Install Playwright Browsers

2
.gitignore vendored
View File

@@ -26,6 +26,8 @@ dist-ssr
# Ignore test data.
tests-ui/data/*
tests-ui/ComfyUI_examples
tests-ui/workflows/examples
# Browser tests
/test-results/

View File

@@ -46,8 +46,10 @@ core extensions will be loaded.
### Test
- `git clone https://github.com/comfyanonymous/ComfyUI_examples.git` to `tests-ui/ComfyUI_examples` or the EXAMPLE_REPO_PATH location specified in .env
- `npm i` to install all dependencies
- `npm run test:generate` to fetch `tests-ui/data/object_info.json`
- `npm run test:generate:examples` to extract the example workflows
- `npm run test` to execute all unit tests.
## Deploy

43
package-lock.json generated
View File

@@ -21,6 +21,7 @@
"@types/node": "^20.14.8",
"babel-plugin-transform-import-meta": "^2.2.1",
"babel-plugin-transform-rename-import": "^2.3.0",
"chalk": "^5.3.0",
"fs-extra": "^11.2.0",
"husky": "^9.0.11",
"identity-obj-proxy": "^3.0.0",
@@ -459,6 +460,20 @@
"node": ">=6.9.0"
}
},
"node_modules/@babel/highlight/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/parser": {
"version": "7.24.7",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz",
@@ -3827,17 +3842,15 @@
]
},
"node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz",
"integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/char-regex": {
@@ -6977,18 +6990,6 @@
"url": "https://opencollective.com/lint-staged"
}
},
"node_modules/lint-staged/node_modules/chalk": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz",
"integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==",
"dev": true,
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/lint-staged/node_modules/execa": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz",

View File

@@ -11,6 +11,7 @@
"typecheck": "tsc --noEmit",
"format": "prettier --write 'src/**/*.{js,ts,tsx}'",
"test": "npm run build && jest",
"test:generate:examples": "npx tsx tests-ui/extractExamples",
"test:generate": "npx tsx tests-ui/setup",
"test:browser": "npx playwright test",
"prepare": "husky || true",
@@ -24,6 +25,7 @@
"@types/node": "^20.14.8",
"babel-plugin-transform-import-meta": "^2.2.1",
"babel-plugin-transform-rename-import": "^2.3.0",
"chalk": "^5.3.0",
"fs-extra": "^11.2.0",
"husky": "^9.0.11",
"identity-obj-proxy": "^3.0.0",

View File

@@ -0,0 +1,76 @@
/**
 * Extract Vorbis comment key/value pairs from a FLAC file buffer.
 *
 * Fix: the function is declared to return `Record<string, string>` but
 * previously returned `undefined` for invalid files and `null` when no
 * Vorbis comment block was present. It now always returns an object
 * (empty on failure), which also satisfies `strictNullChecks`.
 *
 * @param buffer Raw bytes of a FLAC file.
 * @returns Parsed comments, or an empty object if the buffer is not a
 *          valid FLAC file or contains no Vorbis comment block.
 */
export function getFromFlacBuffer(buffer: ArrayBuffer): Record<string, string> {
  const dataView = new DataView(buffer);
  // Verify the "fLaC" magic in the first 4 bytes (guard short buffers too).
  if (
    buffer.byteLength < 4 ||
    String.fromCharCode(...new Uint8Array(buffer, 0, 4)) !== "fLaC"
  ) {
    console.error("Not a valid FLAC file");
    return {};
  }
  // Walk the metadata blocks that follow the signature.
  let offset = 4;
  let vorbisComment: Record<string, string> | null = null;
  while (offset < dataView.byteLength) {
    // Block header: 1-bit last-block flag, 7-bit type, 24-bit size (big-endian).
    const isLastBlock = dataView.getUint8(offset) & 0x80;
    const blockType = dataView.getUint8(offset) & 0x7f;
    const blockSize = dataView.getUint32(offset, false) & 0xffffff;
    offset += 4;
    if (blockType === 4) {
      // Vorbis Comment block type
      vorbisComment = parseVorbisComment(
        new DataView(buffer, offset, blockSize)
      );
    }
    offset += blockSize;
    if (isLastBlock) break;
  }
  return vorbisComment ?? {};
}
/**
 * Read a FLAC file and resolve with its Vorbis comment metadata.
 * @param file FLAC file selected or dropped by the user.
 */
export function getFromFlacFile(file: File): Promise<Record<string, string>> {
  return new Promise((resolve) => {
    const reader = new FileReader();
    reader.onload = (event) => {
      const buffer = event.target.result as ArrayBuffer;
      resolve(getFromFlacBuffer(buffer));
    };
    reader.readAsArrayBuffer(file);
  });
}
// Parse a Vorbis Comment metadata block into a key -> value map.
// Layout: u32 vendor length + vendor string, u32 comment count, then per
// comment a u32 length + "KEY=value" string; all integers little-endian.
// Bug fix: comments were split on every "=", truncating values that contain
// "=" themselves (e.g. base64 payloads). Only the first "=" separates the
// key from the value.
function parseVorbisComment(dataView: DataView): Record<string, string> {
  // Read `length` bytes at `offset` as a string, one character per byte.
  const readString = (offset: number, length: number): string => {
    let s = "";
    for (let i = 0; i < length; i++) {
      s += String.fromCharCode(dataView.getUint8(offset + i));
    }
    return s;
  };
  let offset = 0;
  const vendorLength = dataView.getUint32(offset, true);
  // Skip the vendor string; it is not needed by callers.
  offset += 4 + vendorLength;
  const userCommentListLength = dataView.getUint32(offset, true);
  offset += 4;
  const comments: Record<string, string> = {};
  for (let i = 0; i < userCommentListLength; i++) {
    const commentLength = dataView.getUint32(offset, true);
    offset += 4;
    const comment = readString(offset, commentLength);
    offset += commentLength;
    // Split on the first "=" only so values containing "=" are preserved.
    const sep = comment.indexOf("=");
    const key = sep === -1 ? comment : comment.slice(0, sep);
    const value = sep === -1 ? "" : comment.slice(sep + 1);
    comments[key] = value;
  }
  return comments;
}
// Decode `length` bytes starting at `offset` as a string, one character per
// byte (Latin-1 style decoding).
function getString(dataView: DataView, offset: number, length: number): string {
  const chars: string[] = [];
  for (let i = 0; i < length; i++) {
    chars.push(String.fromCharCode(dataView.getUint8(offset + i)));
  }
  return chars.join("");
}

View File

@@ -0,0 +1,53 @@
/**
 * Extract text chunks (tEXt / iTXt / custom "comf") from a PNG buffer.
 * @param buffer Raw bytes of a PNG file.
 * @returns A keyword -> text map, or `undefined` if the PNG signature is
 *          missing.
 */
export function getFromPngBuffer(buffer: ArrayBuffer) {
  const bytes = new Uint8Array(buffer);
  const view = new DataView(bytes.buffer);
  // Check the first 4 bytes of the 8-byte PNG signature.
  if (view.getUint32(0) !== 0x89504e47) {
    console.error("Not a valid PNG file");
    return;
  }
  const chunks: Record<string, string> = {};
  const decoder = new TextDecoder("utf-8");
  // Chunks start immediately after the 8-byte signature.
  let pos = 8;
  while (pos < bytes.length) {
    // Chunk layout: u32 data length (big-endian), 4-char type, data, CRC.
    const length = view.getUint32(pos);
    const type = String.fromCharCode(...bytes.slice(pos + 4, pos + 8));
    if (type === "tEXt" || type === "comf" || type === "iTXt") {
      // Keyword is NUL-terminated; scan forward for the terminator.
      let end = pos + 8;
      while (bytes[end] !== 0) {
        end++;
      }
      const keyword = String.fromCharCode(...bytes.slice(pos + 8, end));
      // The remaining chunk data (after the NUL) is the text payload.
      chunks[keyword] = decoder.decode(bytes.slice(end + 1, pos + 8 + length));
    }
    // Advance past length (4) + type (4) + data (length) + CRC (4).
    pos += 12 + length;
  }
  return chunks;
}
/**
 * Read a PNG file and resolve with its embedded text chunks.
 * @param file PNG file selected or dropped by the user.
 */
export function getFromPngFile(file: File) {
  return new Promise<Record<string, string>>((resolve) => {
    const reader = new FileReader();
    reader.onload = (event) => {
      const buffer = event.target.result as ArrayBuffer;
      resolve(getFromPngBuffer(buffer));
    };
    reader.readAsArrayBuffer(file);
  });
}

View File

@@ -1,60 +1,15 @@
import { LiteGraph } from "@comfyorg/litegraph";
import { api } from "./api";
import { getFromPngFile } from "./metadata/png";
import { getFromFlacFile } from "./metadata/flac";
export function getPngMetadata(file) {
return new Promise<Record<string, string>>((r) => {
const reader = new FileReader();
reader.onload = (event) => {
// Get the PNG data as a Uint8Array
const pngData = new Uint8Array(event.target.result as ArrayBuffer);
const dataView = new DataView(pngData.buffer);
// Original functions left in for backwards compatibility
export function getPngMetadata(file: File): Promise<Record<string, string>> {
return getFromPngFile(file);
}
// Check that the PNG signature is present
if (dataView.getUint32(0) !== 0x89504e47) {
console.error("Not a valid PNG file");
r({});
return;
}
// Start searching for chunks after the PNG signature
let offset = 8;
let txt_chunks: Record<string, string> = {};
// Loop through the chunks in the PNG file
while (offset < pngData.length) {
// Get the length of the chunk
const length = dataView.getUint32(offset);
// Get the chunk type
const type = String.fromCharCode(
...pngData.slice(offset + 4, offset + 8)
);
if (type === "tEXt" || type == "comf" || type === "iTXt") {
// Get the keyword
let keyword_end = offset + 8;
while (pngData[keyword_end] !== 0) {
keyword_end++;
}
const keyword = String.fromCharCode(
...pngData.slice(offset + 8, keyword_end)
);
// Get the text
const contentArraySegment = pngData.slice(
keyword_end + 1,
offset + 8 + length
);
const contentJson = new TextDecoder("utf-8").decode(
contentArraySegment
);
txt_chunks[keyword] = contentJson;
}
offset += 12 + length;
}
r(txt_chunks);
};
reader.readAsArrayBuffer(file);
});
export function getFlacMetadata(file: File): Promise<Record<string, string>> {
return getFromFlacFile(file);
}
function parseExifData(exifData) {
@@ -197,82 +152,6 @@ export function getLatentMetadata(file) {
});
}
// Decode `length` bytes starting at `offset` as a string, one character per
// byte (Latin-1 style decoding).
function getString(dataView: DataView, offset: number, length: number): string {
  const chars: string[] = [];
  for (let i = 0; i < length; i++) {
    chars.push(String.fromCharCode(dataView.getUint8(offset + i)));
  }
  return chars.join("");
}
// Parse a Vorbis Comment metadata block into a key -> value map.
// Layout: u32 vendor length + vendor string, u32 comment count, then per
// comment a u32 length + "KEY=value" string; all integers little-endian.
// Bug fix: comments were split on every "=", truncating values that contain
// "=" themselves (e.g. base64 payloads). Only the first "=" separates the
// key from the value.
function parseVorbisComment(dataView: DataView): Record<string, string> {
  // Read `length` bytes at `offset` as a string, one character per byte.
  const readString = (offset: number, length: number): string => {
    let s = "";
    for (let i = 0; i < length; i++) {
      s += String.fromCharCode(dataView.getUint8(offset + i));
    }
    return s;
  };
  let offset = 0;
  const vendorLength = dataView.getUint32(offset, true);
  // Skip the vendor string; it is not needed by callers.
  offset += 4 + vendorLength;
  const userCommentListLength = dataView.getUint32(offset, true);
  offset += 4;
  const comments: Record<string, string> = {};
  for (let i = 0; i < userCommentListLength; i++) {
    const commentLength = dataView.getUint32(offset, true);
    offset += 4;
    const comment = readString(offset, commentLength);
    offset += commentLength;
    // Split on the first "=" only so values containing "=" are preserved.
    const sep = comment.indexOf("=");
    const key = sep === -1 ? comment : comment.slice(0, sep);
    const value = sep === -1 ? "" : comment.slice(sep + 1);
    comments[key] = value;
  }
  return comments;
}
// Read a FLAC file and parse its Vorbis comment metadata.
// Bug fix: on an invalid FLAC signature the promise previously never
// settled (the load handler returned without calling the resolver), so
// callers awaiting it would hang forever. It now resolves with an empty
// object in that case.
export function getFlacMetadata(file: Blob): Promise<Record<string, string>> {
  return new Promise((r) => {
    const reader = new FileReader();
    reader.onload = function (event) {
      const arrayBuffer = event.target.result as ArrayBuffer;
      const dataView = new DataView(arrayBuffer);
      // Verify the FLAC signature ("fLaC" magic in the first 4 bytes).
      const signature = String.fromCharCode(
        ...new Uint8Array(arrayBuffer, 0, 4)
      );
      if (signature !== "fLaC") {
        console.error("Not a valid FLAC file");
        r({});
        return;
      }
      // Parse metadata blocks: header is a 1-bit last-block flag, 7-bit
      // type, and 24-bit big-endian size.
      let offset = 4;
      let vorbisComment = null;
      while (offset < dataView.byteLength) {
        const isLastBlock = dataView.getUint8(offset) & 0x80;
        const blockType = dataView.getUint8(offset) & 0x7f;
        const blockSize = dataView.getUint32(offset, false) & 0xffffff;
        offset += 4;
        if (blockType === 4) {
          // Vorbis Comment block type
          vorbisComment = parseVorbisComment(
            new DataView(arrayBuffer, offset, blockSize)
          );
        }
        offset += blockSize;
        if (isLastBlock) break;
      }
      r(vorbisComment);
    };
    reader.readAsArrayBuffer(file);
  });
}
export async function importA1111(graph, parameters) {
const p = parameters.lastIndexOf("\nSteps:");
if (p > -1) {

170
tests-ui/extractExamples.ts Normal file
View File

@@ -0,0 +1,170 @@
/*
Script to generate test API json from the ComfyUI_examples repo.
Requires the repo to be cloned to the tests-ui directory or specified via the EXAMPLE_REPO_PATH env var.
*/
import chalk from "chalk";
import fs from "fs";
import path from "path";
import { fileURLToPath } from "node:url";
import { getFromPngBuffer } from "@/scripts/metadata/png";
import { getFromFlacBuffer } from "@/scripts/metadata/flac";
import dotenv from "dotenv";
dotenv.config();
// Resolve script-relative paths (ESM has no __dirname).
const dirname = path.dirname(fileURLToPath(import.meta.url));
// Location of the cloned ComfyUI_examples repo (overridable via env var).
const repoPath =
  process.env.EXAMPLE_REPO_PATH || path.resolve(dirname, "ComfyUI_examples");
// Output directory for the extracted workflow JSON files.
const workflowsPath = path.resolve(dirname, "workflows", "examples");
if (!fs.existsSync(repoPath)) {
  console.error(
    `ComfyUI_examples repo not found. Please clone this to ${repoPath} or set the EXAMPLE_REPO_PATH env var (see .env_example) and re-run.`
  );
  // Bug fix: previously the script continued after this message and crashed
  // later with an opaque ENOENT while scanning the missing repo; fail fast.
  process.exit(1);
}
if (!fs.existsSync(workflowsPath)) {
  // recursive: also create the intermediate "workflows" directory if absent.
  await fs.promises.mkdir(workflowsPath, { recursive: true });
}
/**
 * Recursively walk `dir`, yielding absolute paths of files whose extension
 * is one of `exts` (e.g. ".png", ".flac").
 */
async function* getFiles(
  dir: string,
  ...exts: string[]
): AsyncGenerator<string, void, void> {
  const entries = await fs.promises.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const fullPath = path.resolve(dir, entry.name);
    if (entry.isDirectory()) {
      // Descend into subdirectories depth-first.
      yield* getFiles(fullPath, ...exts);
    } else if (exts.includes(path.extname(fullPath))) {
      yield fullPath;
    }
  }
}
/**
 * Validate that extracted metadata contains parseable "prompt" and
 * "workflow" JSON strings, returning both.
 *
 * Fixes: previously threw bare strings (hard to trace, non-idiomatic) and
 * read `.message` off an untyped catch variable; it now throws Error
 * objects and narrows the caught value before reading its message.
 *
 * @throws Error if either property is missing or is not valid JSON.
 */
async function validateMetadata(metadata: Record<string, string>) {
  const check = (prop: "prompt" | "workflow") => {
    const v = metadata?.[prop];
    if (!v) throw new Error(`${prop} not found in metadata`);
    try {
      JSON.parse(v);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      throw new Error(`${prop} invalid json: ${message}`);
    }
    return v;
  };
  return { prompt: check("prompt"), workflow: check("workflow") };
}
// True when the freshly-extracted JSON differs from what is already on disk.
async function hasExampleChanged(
  existingFilePath: string,
  exampleJson: string
) {
  const current = await fs.promises.readFile(existingFilePath, "utf8");
  return current !== exampleJson;
}
// Example images to ignore as they don't contain embedded workflows.
const ignore = [
  "unclip_sunset.png",
  "unclip_mountains.png",
  "inpaint_yosemite_inpaint_example.png",
  "controlnet_shark_depthmap.png",
  "controlnet_pose_worship.png",
  "controlnet_pose_present.png",
  "controlnet_input_scribble_example.png",
  "controlnet_house_scribble.png",
];
// Find all existing examples so we can check if any are removed/changed.
const existing = new Set(
  (await fs.promises.readdir(workflowsPath, { withFileTypes: true }))
    .filter((d) => d.isFile())
    .map((d) => path.resolve(workflowsPath, d.name))
);
// Buckets for the summary report printed at the end of the run.
const results = {
  new: [],
  changed: [],
  unchanged: [],
  missing: [],
  failed: [],
};
// Count of example files processed (excluding ignored ones).
let total = 0;
for await (const file of getFiles(repoPath, ".png", ".flac")) {
  // Flatten the repo-relative path into a single output file name
  // (path separators become underscores).
  const cleanedName = path
    .relative(repoPath, file)
    .replaceAll("/", "_")
    .replaceAll("\\", "_");
  if (ignore.includes(cleanedName)) continue;
  total++;
  let metadata: { prompt: string; workflow: string };
  try {
    const { buffer } = await fs.promises.readFile(file);
    // Extract and validate the embedded workflow metadata per container type.
    switch (path.extname(file)) {
      case ".png":
        metadata = await validateMetadata(getFromPngBuffer(buffer));
        break;
      case ".flac":
        metadata = await validateMetadata(getFromFlacBuffer(buffer));
        break;
    }
    const outPath = path.resolve(workflowsPath, cleanedName + ".json");
    const exampleJson = JSON.stringify(metadata);
    if (existing.has(outPath)) {
      // Seen this example before: remove it from `existing` so that
      // whatever remains afterwards marks missing examples.
      existing.delete(outPath);
      if (await hasExampleChanged(outPath, exampleJson)) {
        results.changed.push(outPath);
      } else {
        // Unchanged, no point in re-saving
        results.unchanged.push(outPath);
        continue;
      }
    } else {
      results.new.push(outPath);
    }
    await fs.promises.writeFile(outPath, exampleJson, "utf8");
  } catch (error) {
    // Record the failure and keep going; failures are reported at the end.
    results.failed.push({ file, error });
  }
}
// Any workflows left in the existing set are now missing; these should be
// checked and manually removed.
results.missing.push(...existing);
// Color helper: use `gt0` color when the count is non-zero, gray otherwise.
const c = (v: number, gt0: "red" | "yellow" | "green") =>
  chalk[v > 0 ? gt0 : "gray"](v);
console.log(`Processed ${chalk.green(total)} examples`);
console.log(`  ${chalk.gray(results.unchanged.length)} unchanged`);
console.log(`  ${c(results.changed.length, "yellow")} changed`);
console.log(`  ${c(results.new.length, "green")} new`);
console.log(`  ${c(results.missing.length, "red")} missing`);
console.log(`  ${c(results.failed.length, "red")} failed`);
if (results.missing.length) {
  console.log();
  console.log(
    chalk.red(
      "The following examples are missing and require manual reviewing & removal:"
    )
  );
  for (const m of results.missing) {
    console.log(m);
  }
}
if (results.failed.length) {
  console.log();
  console.log(chalk.red("The following examples failed to extract:"));
  for (const m of results.failed) {
    console.log(m.file);
    console.error(m.error);
    console.log();
  }
}

View File

@@ -0,0 +1,69 @@
import { readdirSync, readFileSync } from "fs";
import lg from "../utils/litegraph";
import path from "path";
import { start } from "../utils";
const WORKFLOW_DIR = "tests-ui/workflows/examples";
// Normalize old example prompts so they match what the current frontend
// generates. The parameter is the full prompt map: node id -> node record.
// Fix: the previous annotation typed the parameter as a single
// `{ inputs: any }` node even though `Object.values` iterates a map of
// nodes; the annotation is erased at runtime so behavior is unchanged.
function fixLegacyPrompt(prompt: Record<string, any>) {
  for (const n of Object.values(prompt)) {
    const { inputs } = n;
    // Inputs added after the examples were published get their defaults.
    if (n.class_type === "VAEEncodeForInpaint") {
      if (n.inputs["grow_mask_by"] == null) n.inputs["grow_mask_by"] = 6;
    } else if (n.class_type === "SDTurboScheduler") {
      if (n.inputs["denoise"] == null) n.inputs["denoise"] = 1;
    }
    // This input has been renamed
    if (inputs["choose file to upload"]) {
      const v = inputs["choose file to upload"];
      delete inputs["choose file to upload"];
      inputs["upload"] = v;
    }
    // Internal flag no longer present in generated prompts.
    delete n["is_changed"];
  }
  return prompt;
}
// One jest test per extracted example workflow: load the workflow into the
// app and verify that graphToPrompt() reproduces the example's prompt json.
describe("example workflows", () => {
  beforeEach(() => {
    lg.setup(global);
  });
  afterEach(() => {
    lg.teardown(global);
  });
  // Tests are generated at module load time from the extracted example files.
  for (const file of readdirSync(WORKFLOW_DIR)) {
    if (!file.endsWith(".json")) continue;
    const { workflow, prompt } = JSON.parse(
      readFileSync(path.resolve(WORKFLOW_DIR, file), "utf8")
    );
    let skip = false;
    let parsedWorkflow;
    try {
      // Workflows with group nodes don't generate the same IDs as the
      // examples; they'll need recreating, so skip them for now.
      parsedWorkflow = JSON.parse(workflow);
      skip = !!Object.keys(parsedWorkflow?.extra?.groupNodes ?? {}).length;
    } catch (error) {}
    (skip ? test.skip : test)(
      "correctly generates prompt json for " + file,
      async () => {
        if (!workflow || !prompt) throw new Error("Invalid example json");
        const { app } = await start();
        await app.loadGraphData(parsedWorkflow);
        const output = await app.graphToPrompt();
        // Compare against the example's prompt after legacy normalization.
        expect(output.output).toEqual(fixLegacyPrompt(JSON.parse(prompt)));
      }
    );
  }
});