mirror of https://github.com/ostris/ai-toolkit.git (synced 2026-01-26 16:39:47 +00:00)
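// Next.js API route handler: reports NVIDIA GPU telemetry as JSON by shelling out to
// nvidia-smi. Responds with { hasNvidiaSmi, gpus, error? }.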
import { NextResponse } from 'next/server';
import { exec } from 'child_process';
import { promisify } from 'util';
import os from 'os';

const execAsync = promisify(exec);
export async function GET() {
  try {
    // Get platform
    const platform = os.platform();
    const isWindows = platform === 'win32';

    // Check if nvidia-smi is available
    const hasNvidiaSmi = await checkNvidiaSmi(isWindows);

    if (!hasNvidiaSmi) {
      return NextResponse.json({
        hasNvidiaSmi: false,
        gpus: [],
        error: 'nvidia-smi not found or not accessible',
      });
    }

    // Get GPU stats
    const gpuStats = await getGpuStats(isWindows);

    return NextResponse.json({
      hasNvidiaSmi: true,
      gpus: gpuStats,
    });
  } catch (error) {
    console.error('Error fetching NVIDIA GPU stats:', error);
    return NextResponse.json(
      {
        hasNvidiaSmi: false,
        gpus: [],
        error: `Failed to fetch GPU stats: ${error instanceof Error ? error.message : String(error)}`,
      },
      { status: 500 },
    );
  }
}
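// Not part of the original file: a sketch of the JSON payload this route returns,
// inferred from the handler above and the parsing in getGpuStats() below. The interface
// names are illustrative; units are the nvidia-smi defaults (°C, %, MiB, W, MHz).
//
//   interface GpuInfo {
//     index: number;
//     name: string;
//     driverVersion: string;
//     temperature: number;                                   // °C
//     utilization: { gpu: number; memory: number };          // %
//     memory: { total: number; free: number; used: number }; // MiB
//     power: { draw: number; limit: number };                // W
//     clocks: { graphics: number; memory: number };          // MHz
//     fan: { speed: number };                                 // %, 0 when not reported
//   }
//
//   interface GpuStatsResponse {
//     hasNvidiaSmi: boolean;
//     gpus: GpuInfo[];
//     error?: string;
//   }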
async function checkNvidiaSmi(isWindows: boolean): Promise<boolean> {
  try {
    if (isWindows) {
      // Check if nvidia-smi is available on Windows
      // It's typically located in C:\Program Files\NVIDIA Corporation\NVSMI\nvidia-smi.exe
      // but we'll just try to run it directly as it may be in PATH
      await execAsync('nvidia-smi -L');
    } else {
      // Linux/macOS check
      await execAsync('which nvidia-smi');
    }
    return true;
  } catch (error) {
    return false;
  }
}
async function getGpuStats(isWindows: boolean) {
  // Command is the same for both platforms, but the path might be different
  const command =
    'nvidia-smi --query-gpu=index,name,driver_version,temperature.gpu,utilization.gpu,utilization.memory,memory.total,memory.free,memory.used,power.draw,power.limit,clocks.current.graphics,clocks.current.memory,fan.speed --format=csv,noheader,nounits';

  // Execute command
  const { stdout } = await execAsync(command, {
    env: { ...process.env, CUDA_DEVICE_ORDER: 'PCI_BUS_ID' },
  });
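  // Illustrative only (not captured output): with --format=csv,noheader,nounits each GPU
  // prints one line of comma-plus-space separated fields in the query order above, e.g.
  //   0, NVIDIA GeForce RTX 3090, 550.54.14, 52, 17, 4, 24576, 20000, 4576, 102.45, 350.00, 1695, 9751, 41
  // Fields the driver cannot report (for example fan speed on passively cooled cards)
  // come back as a bracketed placeholder such as [N/A]; parseInt() turns those into NaN,
  // which the `|| 0` fallback on fan speed below maps to 0.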
  // Parse CSV output
  const gpus = stdout
    .trim()
    .split('\n')
    .map(line => {
      const [
        index,
        name,
        driverVersion,
        temperature,
        gpuUtil,
        memoryUtil,
        memoryTotal,
        memoryFree,
        memoryUsed,
        powerDraw,
        powerLimit,
        clockGraphics,
        clockMemory,
        fanSpeed,
      ] = line.split(', ').map(item => item.trim());

      return {
        index: parseInt(index),
        name,
        driverVersion,
        temperature: parseInt(temperature),
        utilization: {
          gpu: parseInt(gpuUtil),
          memory: parseInt(memoryUtil),
        },
        memory: {
          total: parseInt(memoryTotal),
          free: parseInt(memoryFree),
          used: parseInt(memoryUsed),
        },
        power: {
          draw: parseFloat(powerDraw),
          limit: parseFloat(powerLimit),
        },
        clocks: {
          graphics: parseInt(clockGraphics),
          memory: parseInt(clockMemory),
        },
        fan: {
          speed: parseInt(fanSpeed) || 0, // Some GPUs might not report fan speed, default to 0
        },
      };
    });

  return gpus;
}
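// Not part of the original file: a minimal client-side usage sketch. The fetch path
// '/api/gpu' is hypothetical; the real URL depends on where this route.ts lives under app/.
//
//   async function fetchGpuStats() {
//     const res = await fetch('/api/gpu');
//     const data = await res.json();
//     return data.hasNvidiaSmi ? data.gpus : [];
//   }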