Add unified jobs API with /api/jobs endpoints (#11054)
* feat: create a /jobs api to return queue and history jobs
* update unused vars
* include priority
* create jobs helper file
* fix ruff
* update how we set error message
* include execution error in both responses
* rename error -> failed, fix output shape
* re-use queue and history functions
* set workflow id
* allow sort by exec duration
* fix tests
* send priority and remove error msg
* use ws messages to get start and end times
* revert main.py fully
* refactor: move all /jobs business logic to jobs.py
* fix failing test
* remove some tests
* fix non dict nodes
* address comments
* filter by workflow id and remove null fields
* add clearer typing - remove get("..") or ..
* refactor query params to top of get_job(s) doc, add remove_sensitive_from_queue
* add brief comment explaining why we skip animated
* comment that format field is for frontend backward compatibility
* fix whitespace
---------
Co-authored-by: Jedrzej Kosinski <kosinkadink1@gmail.com>
Co-authored-by: guill <jacob.e.segal@gmail.com>
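For orientation, the endpoints added here expose queued and historical prompts as uniform job objects: GET /api/jobs lists them, filterable and sortable via the status, limit, offset, sort_by, and sort_order query parameters, and GET /api/jobs/{id} returns a single job including its outputs (404 if unknown). A minimal sketch of calling them with the standard library, assuming a local server on ComfyUI's default port:

    import json
    import urllib.request

    base = "http://127.0.0.1:8188"  # assumed default local ComfyUI address

    # List up to five completed jobs, newest first.
    url = base + "/api/jobs?status=completed&limit=5&sort_by=created_at&sort_order=desc"
    with urllib.request.urlopen(url) as response:
        jobs = json.loads(response.read())["jobs"]

    for job in jobs:
        # Summary fields asserted by the tests below: id, status, create_time,
        # outputs_count, preview_output.
        print(job["id"], job["status"], job["outputs_count"])

    # Fetch one job by its prompt id; the full record also includes 'outputs'.
    if jobs:
        with urllib.request.urlopen(base + "/api/jobs/" + jobs[0]["id"]) as response:
            detail = json.loads(response.read())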
@@ -99,6 +99,37 @@ class ComfyClient:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read())

    def get_jobs(self, status=None, limit=None, offset=None, sort_by=None, sort_order=None):
        url = "http://{}/api/jobs".format(self.server_address)
        params = {}
        if status is not None:
            params["status"] = status
        if limit is not None:
            params["limit"] = limit
        if offset is not None:
            params["offset"] = offset
        if sort_by is not None:
            params["sort_by"] = sort_by
        if sort_order is not None:
            params["sort_order"] = sort_order

        if params:
            url_values = urllib.parse.urlencode(params)
            url = "{}?{}".format(url, url_values)

        with urllib.request.urlopen(url) as response:
            return json.loads(response.read())

    def get_job(self, job_id):
        url = "http://{}/api/jobs/{}".format(self.server_address, job_id)
        try:
            with urllib.request.urlopen(url) as response:
                return json.loads(response.read())
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return None
            raise

    def set_test_name(self, name):
        self.test_name = name
@@ -877,3 +908,106 @@ class TestExecution:
        result = client.get_all_history(max_items=5, offset=len(all_history) - 1)

        assert len(result) <= 1, "Should return at most 1 item when offset is near end"

    # Jobs API tests
    def test_jobs_api_job_structure(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test that job objects have required fields"""
        self._create_history_item(client, builder)

        jobs_response = client.get_jobs(status="completed", limit=1)
        assert len(jobs_response["jobs"]) > 0, "Should have at least one job"

        job = jobs_response["jobs"][0]
        assert "id" in job, "Job should have id"
        assert "status" in job, "Job should have status"
        assert "create_time" in job, "Job should have create_time"
        assert "outputs_count" in job, "Job should have outputs_count"
        assert "preview_output" in job, "Job should have preview_output"

    def test_jobs_api_preview_output_structure(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test that preview_output has correct structure"""
        self._create_history_item(client, builder)

        jobs_response = client.get_jobs(status="completed", limit=1)
        job = jobs_response["jobs"][0]

        if job["preview_output"] is not None:
            preview = job["preview_output"]
            assert "filename" in preview, "Preview should have filename"
            assert "nodeId" in preview, "Preview should have nodeId"
            assert "mediaType" in preview, "Preview should have mediaType"

    def test_jobs_api_pagination(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test jobs API pagination"""
        for _ in range(5):
            self._create_history_item(client, builder)

        first_page = client.get_jobs(limit=2, offset=0)
        second_page = client.get_jobs(limit=2, offset=2)

        assert len(first_page["jobs"]) <= 2, "First page should have at most 2 jobs"
        assert len(second_page["jobs"]) <= 2, "Second page should have at most 2 jobs"

        first_ids = {j["id"] for j in first_page["jobs"]}
        second_ids = {j["id"] for j in second_page["jobs"]}
        assert first_ids.isdisjoint(second_ids), "Pages should have different jobs"

    def test_jobs_api_sorting(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test jobs API sorting"""
        for _ in range(3):
            self._create_history_item(client, builder)

        desc_jobs = client.get_jobs(sort_order="desc")
        asc_jobs = client.get_jobs(sort_order="asc")

        if len(desc_jobs["jobs"]) >= 2:
            desc_times = [j["create_time"] for j in desc_jobs["jobs"] if j["create_time"]]
            asc_times = [j["create_time"] for j in asc_jobs["jobs"] if j["create_time"]]
            if len(desc_times) >= 2:
                assert desc_times == sorted(desc_times, reverse=True), "Desc should be newest first"
            if len(asc_times) >= 2:
                assert asc_times == sorted(asc_times), "Asc should be oldest first"

    def test_jobs_api_status_filter(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test jobs API status filtering"""
        self._create_history_item(client, builder)

        completed_jobs = client.get_jobs(status="completed")
        assert len(completed_jobs["jobs"]) > 0, "Should have completed jobs from history"

        for job in completed_jobs["jobs"]:
            assert job["status"] == "completed", "Should only return completed jobs"

        # Pending jobs are transient - just verify filter doesn't error
        pending_jobs = client.get_jobs(status="pending")
        for job in pending_jobs["jobs"]:
            assert job["status"] == "pending", "Should only return pending jobs"

    def test_get_job_by_id(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test getting a single job by ID"""
        result = self._create_history_item(client, builder)
        prompt_id = result.get_prompt_id()

        job = client.get_job(prompt_id)
        assert job is not None, "Should find the job"
        assert job["id"] == prompt_id, "Job ID should match"
        assert "outputs" in job, "Single job should include outputs"

    def test_get_job_not_found(
        self, client: ComfyClient, builder: GraphBuilder
    ):
        """Test getting a non-existent job returns 404"""
        job = client.get_job("nonexistent-job-id")
        assert job is None, "Non-existent job should return None"
tests/execution/test_jobs.py (new file, 361 lines)
@@ -0,0 +1,361 @@
"""Unit tests for comfy_execution/jobs.py"""

from comfy_execution.jobs import (
    JobStatus,
    is_previewable,
    normalize_queue_item,
    normalize_history_item,
    get_outputs_summary,
    apply_sorting,
)


class TestJobStatus:
    """Test JobStatus constants."""

    def test_status_values(self):
        """Status constants should have expected string values."""
        assert JobStatus.PENDING == 'pending'
        assert JobStatus.IN_PROGRESS == 'in_progress'
        assert JobStatus.COMPLETED == 'completed'
        assert JobStatus.FAILED == 'failed'

    def test_all_contains_all_statuses(self):
        """ALL should contain all status values."""
        assert JobStatus.PENDING in JobStatus.ALL
        assert JobStatus.IN_PROGRESS in JobStatus.ALL
        assert JobStatus.COMPLETED in JobStatus.ALL
        assert JobStatus.FAILED in JobStatus.ALL
        assert len(JobStatus.ALL) == 4


class TestIsPreviewable:
    """Unit tests for is_previewable()"""

    def test_previewable_media_types(self):
        """Images, video, audio media types should be previewable."""
        for media_type in ['images', 'video', 'audio']:
            assert is_previewable(media_type, {}) is True

    def test_non_previewable_media_types(self):
        """Other media types should not be previewable."""
        for media_type in ['latents', 'text', 'metadata', 'files']:
            assert is_previewable(media_type, {}) is False

    def test_3d_extensions_previewable(self):
        """3D file extensions should be previewable regardless of media_type."""
        for ext in ['.obj', '.fbx', '.gltf', '.glb']:
            item = {'filename': f'model{ext}'}
            assert is_previewable('files', item) is True

    def test_3d_extensions_case_insensitive(self):
        """3D extension check should be case insensitive."""
        item = {'filename': 'MODEL.GLB'}
        assert is_previewable('files', item) is True

    def test_video_format_previewable(self):
        """Items with video/ format should be previewable."""
        item = {'format': 'video/mp4'}
        assert is_previewable('files', item) is True

    def test_audio_format_previewable(self):
        """Items with audio/ format should be previewable."""
        item = {'format': 'audio/wav'}
        assert is_previewable('files', item) is True

    def test_other_format_not_previewable(self):
        """Items with other format should not be previewable."""
        item = {'format': 'application/json'}
        assert is_previewable('files', item) is False
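

# A minimal sketch of the previewability rule these tests exercise,
# reconstructed for reference only; the actual is_previewable() in
# comfy_execution.jobs may be structured differently.
def is_previewable_sketch(media_type, item):
    # Whole media types that always get an inline preview.
    if media_type in ('images', 'video', 'audio'):
        return True
    # 3D model files are previewable regardless of media_type (case-insensitive).
    if item.get('filename', '').lower().endswith(('.obj', '.fbx', '.gltf', '.glb')):
        return True
    # Otherwise fall back to the MIME-style 'format' field.
    return item.get('format', '').startswith(('video/', 'audio/'))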


class TestGetOutputsSummary:
    """Unit tests for get_outputs_summary()"""

    def test_empty_outputs(self):
        """Empty outputs should return 0 count and None preview."""
        count, preview = get_outputs_summary({})
        assert count == 0
        assert preview is None

    def test_counts_across_multiple_nodes(self):
        """Outputs from multiple nodes should all be counted."""
        outputs = {
            'node1': {'images': [{'filename': 'a.png', 'type': 'output'}]},
            'node2': {'images': [{'filename': 'b.png', 'type': 'output'}]},
            'node3': {'images': [
                {'filename': 'c.png', 'type': 'output'},
                {'filename': 'd.png', 'type': 'output'}
            ]}
        }
        count, preview = get_outputs_summary(outputs)
        assert count == 4

    def test_skips_animated_key_and_non_list_values(self):
        """The 'animated' key and non-list values should be skipped."""
        outputs = {
            'node1': {
                'images': [{'filename': 'test.png', 'type': 'output'}],
                'animated': [True],   # Should skip due to key name
                'metadata': 'string', # Should skip due to non-list
                'count': 42           # Should skip due to non-list
            }
        }
        count, preview = get_outputs_summary(outputs)
        assert count == 1

    def test_preview_prefers_type_output(self):
        """Items with type='output' should be preferred for preview."""
        outputs = {
            'node1': {
                'images': [
                    {'filename': 'temp.png', 'type': 'temp'},
                    {'filename': 'output.png', 'type': 'output'}
                ]
            }
        }
        count, preview = get_outputs_summary(outputs)
        assert count == 2
        assert preview['filename'] == 'output.png'

    def test_preview_fallback_when_no_output_type(self):
        """If no type='output', should use first previewable."""
        outputs = {
            'node1': {
                'images': [
                    {'filename': 'temp1.png', 'type': 'temp'},
                    {'filename': 'temp2.png', 'type': 'temp'}
                ]
            }
        }
        count, preview = get_outputs_summary(outputs)
        assert preview['filename'] == 'temp1.png'

    def test_non_previewable_media_types_counted_but_no_preview(self):
        """Non-previewable media types should be counted but not used as preview."""
        outputs = {
            'node1': {
                'latents': [
                    {'filename': 'latent1.safetensors'},
                    {'filename': 'latent2.safetensors'}
                ]
            }
        }
        count, preview = get_outputs_summary(outputs)
        assert count == 2
        assert preview is None

    def test_previewable_media_types(self):
        """Images, video, and audio media types should be previewable."""
        for media_type in ['images', 'video', 'audio']:
            outputs = {
                'node1': {
                    media_type: [{'filename': 'test.file', 'type': 'output'}]
                }
            }
            count, preview = get_outputs_summary(outputs)
            assert preview is not None, f"{media_type} should be previewable"

    def test_3d_files_previewable(self):
        """3D file extensions should be previewable."""
        for ext in ['.obj', '.fbx', '.gltf', '.glb']:
            outputs = {
                'node1': {
                    'files': [{'filename': f'model{ext}', 'type': 'output'}]
                }
            }
            count, preview = get_outputs_summary(outputs)
            assert preview is not None, f"3D file {ext} should be previewable"

    def test_format_mime_type_previewable(self):
        """Files with video/ or audio/ format should be previewable."""
        for fmt in ['video/x-custom', 'audio/x-custom']:
            outputs = {
                'node1': {
                    'files': [{'filename': 'file.custom', 'format': fmt, 'type': 'output'}]
                }
            }
            count, preview = get_outputs_summary(outputs)
            assert preview is not None, f"Format {fmt} should be previewable"

    def test_preview_enriched_with_node_metadata(self):
        """Preview should include nodeId, mediaType, and original fields."""
        outputs = {
            'node123': {
                'images': [{'filename': 'test.png', 'type': 'output', 'subfolder': 'outputs'}]
            }
        }
        count, preview = get_outputs_summary(outputs)
        assert preview['nodeId'] == 'node123'
        assert preview['mediaType'] == 'images'
        assert preview['subfolder'] == 'outputs'
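

# A minimal sketch of the summary logic the tests above pin down; the actual
# get_outputs_summary() in comfy_execution.jobs may differ in detail. It counts
# every item in list-valued output fields (skipping the frontend-only
# 'animated' flag) and prefers a type='output' item as the preview, falling
# back to the first previewable item.
def get_outputs_summary_sketch(outputs):
    count, preview, fallback = 0, None, None
    for node_id, node_outputs in outputs.items():
        for media_type, items in node_outputs.items():
            if media_type == 'animated' or not isinstance(items, list):
                continue  # scalar fields and the 'animated' flag are metadata, not outputs
            for item in items:
                count += 1
                if not is_previewable(media_type, item):
                    continue
                # Enrich the preview candidate with its source node and media type.
                enriched = {**item, 'nodeId': node_id, 'mediaType': media_type}
                if preview is None and item.get('type') == 'output':
                    preview = enriched
                elif fallback is None:
                    fallback = enriched
    return count, preview if preview is not None else fallback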


class TestApplySorting:
    """Unit tests for apply_sorting()"""

    def test_sort_by_create_time_desc(self):
        """Default sort by create_time descending."""
        jobs = [
            {'id': 'a', 'create_time': 100},
            {'id': 'b', 'create_time': 300},
            {'id': 'c', 'create_time': 200},
        ]
        result = apply_sorting(jobs, 'created_at', 'desc')
        assert [j['id'] for j in result] == ['b', 'c', 'a']

    def test_sort_by_create_time_asc(self):
        """Sort by create_time ascending."""
        jobs = [
            {'id': 'a', 'create_time': 100},
            {'id': 'b', 'create_time': 300},
            {'id': 'c', 'create_time': 200},
        ]
        result = apply_sorting(jobs, 'created_at', 'asc')
        assert [j['id'] for j in result] == ['a', 'c', 'b']

    def test_sort_by_execution_duration(self):
        """Sort by execution_duration should order by duration."""
        jobs = [
            {'id': 'a', 'create_time': 100, 'execution_start_time': 100, 'execution_end_time': 5100},  # 5s
            {'id': 'b', 'create_time': 300, 'execution_start_time': 300, 'execution_end_time': 1300},  # 1s
            {'id': 'c', 'create_time': 200, 'execution_start_time': 200, 'execution_end_time': 3200},  # 3s
        ]
        result = apply_sorting(jobs, 'execution_duration', 'desc')
        assert [j['id'] for j in result] == ['a', 'c', 'b']

    def test_sort_with_none_values(self):
        """Jobs with None values should sort as 0."""
        jobs = [
            {'id': 'a', 'create_time': 100, 'execution_start_time': 100, 'execution_end_time': 5100},
            {'id': 'b', 'create_time': 300, 'execution_start_time': None, 'execution_end_time': None},
            {'id': 'c', 'create_time': 200, 'execution_start_time': 200, 'execution_end_time': 3200},
        ]
        result = apply_sorting(jobs, 'execution_duration', 'asc')
        assert result[0]['id'] == 'b'  # None treated as 0, comes first
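

# A minimal sketch of sorting behavior consistent with the tests above; the
# actual apply_sorting() in comfy_execution.jobs may differ in detail. The
# 'created_at' key sorts on create_time; 'execution_duration' sorts on
# end - start, with missing timestamps treated as 0.
def apply_sorting_sketch(jobs, sort_by, sort_order):
    def sort_key(job):
        if sort_by == 'execution_duration':
            start = job.get('execution_start_time') or 0
            end = job.get('execution_end_time') or 0
            return end - start
        return job.get('create_time') or 0
    return sorted(jobs, key=sort_key, reverse=(sort_order == 'desc'))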


class TestNormalizeQueueItem:
    """Unit tests for normalize_queue_item()"""

    def test_basic_normalization(self):
        """Queue item should be normalized to job dict."""
        item = (
            10,             # priority/number
            'prompt-123',   # prompt_id
            {'nodes': {}},  # prompt
            {
                'create_time': 1234567890,
                'extra_pnginfo': {'workflow': {'id': 'workflow-abc'}}
            },              # extra_data
            ['node1'],      # outputs_to_execute
        )
        job = normalize_queue_item(item, JobStatus.PENDING)

        assert job['id'] == 'prompt-123'
        assert job['status'] == 'pending'
        assert job['priority'] == 10
        assert job['create_time'] == 1234567890
        assert 'execution_start_time' not in job
        assert 'execution_end_time' not in job
        assert 'execution_error' not in job
        assert 'preview_output' not in job
        assert job['outputs_count'] == 0
        assert job['workflow_id'] == 'workflow-abc'


class TestNormalizeHistoryItem:
    """Unit tests for normalize_history_item()"""

    def test_completed_job(self):
        """Completed history item should have correct status and times from messages."""
        history_item = {
            'prompt': (
                5,  # priority
                'prompt-456',
                {'nodes': {}},
                {
                    'create_time': 1234567890000,
                    'extra_pnginfo': {'workflow': {'id': 'workflow-xyz'}}
                },
                ['node1'],
            ),
            'status': {
                'status_str': 'success',
                'completed': True,
                'messages': [
                    ('execution_start', {'prompt_id': 'prompt-456', 'timestamp': 1234567890500}),
                    ('execution_success', {'prompt_id': 'prompt-456', 'timestamp': 1234567893000}),
                ]
            },
            'outputs': {},
        }
        job = normalize_history_item('prompt-456', history_item)

        assert job['id'] == 'prompt-456'
        assert job['status'] == 'completed'
        assert job['priority'] == 5
        assert job['execution_start_time'] == 1234567890500
        assert job['execution_end_time'] == 1234567893000
        assert job['workflow_id'] == 'workflow-xyz'

    def test_failed_job(self):
        """Failed history item should have failed status and error from messages."""
        history_item = {
            'prompt': (
                5,
                'prompt-789',
                {'nodes': {}},
                {'create_time': 1234567890000},
                ['node1'],
            ),
            'status': {
                'status_str': 'error',
                'completed': False,
                'messages': [
                    ('execution_start', {'prompt_id': 'prompt-789', 'timestamp': 1234567890500}),
                    ('execution_error', {
                        'prompt_id': 'prompt-789',
                        'node_id': '5',
                        'node_type': 'KSampler',
                        'exception_message': 'CUDA out of memory',
                        'exception_type': 'RuntimeError',
                        'traceback': ['Traceback...', 'RuntimeError: CUDA out of memory'],
                        'timestamp': 1234567891000,
                    })
                ]
            },
            'outputs': {},
        }

        job = normalize_history_item('prompt-789', history_item)
        assert job['status'] == 'failed'
        assert job['execution_start_time'] == 1234567890500
        assert job['execution_end_time'] == 1234567891000
        assert job['execution_error']['node_id'] == '5'
        assert job['execution_error']['node_type'] == 'KSampler'
        assert job['execution_error']['exception_message'] == 'CUDA out of memory'

    def test_include_outputs(self):
        """When include_outputs=True, should include full output data."""
        history_item = {
            'prompt': (
                5,
                'prompt-123',
                {'nodes': {'1': {}}},
                {'create_time': 1234567890, 'client_id': 'abc'},
                ['node1'],
            ),
            'status': {'status_str': 'success', 'completed': True, 'messages': []},
            'outputs': {'node1': {'images': [{'filename': 'test.png'}]}},
        }
        job = normalize_history_item('prompt-123', history_item, include_outputs=True)

        assert 'outputs' in job
        assert 'workflow' in job
        assert 'execution_status' in job
        assert job['outputs'] == {'node1': {'images': [{'filename': 'test.png'}]}}
        assert job['workflow'] == {
            'prompt': {'nodes': {'1': {}}},
            'extra_data': {'create_time': 1234567890, 'client_id': 'abc'},
        }
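
Taken together, the normalization tests pin down the summary shape a job is reduced to. For reference, a completed history job comes back roughly as follows; the values are illustrative, lifted from the fixtures above rather than captured from a server, and the authoritative field set lives in comfy_execution/jobs.py:

    completed_job = {
        'id': 'prompt-456',
        'status': 'completed',                  # one of JobStatus.ALL
        'priority': 5,
        'create_time': 1234567890000,
        'execution_start_time': 1234567890500,  # from the execution_start message
        'execution_end_time': 1234567893000,    # from the execution_success message
        'workflow_id': 'workflow-xyz',
        'outputs_count': 1,                     # via get_outputs_summary()
        'preview_output': {'filename': 'test.png', 'nodeId': 'node123', 'mediaType': 'images'},
    }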