fix: properly merge LCOV shard coverage instead of concatenating

When the same source file appears in multiple shards, naive concatenation
double-counts LF:/LH: counters. Add scripts/merge-lcov.ts that unions
DA: records per source file (max hit count per line) and recomputes
summary counters from merged data.

Also fix coverage-report.ts and coverage-slack-notify.ts to deduplicate
per-file stats using Math.max instead of summing, making them robust
against unmerged LCOV input.
This commit is contained in:
bymyself
2026-04-09 10:20:25 -07:00
parent d187fecb5f
commit 377bb7e5b5
4 changed files with 278 additions and 27 deletions

View File

@@ -40,9 +40,7 @@ jobs:
- name: Merge shard coverage into single LCOV
run: |
mkdir -p coverage/playwright
# Concatenate all per-shard LCOV files into one
find temp/coverage-shards -name 'coverage.lcov' -exec cat {} + > coverage/playwright/coverage.lcov
echo "Merged coverage from $(find temp/coverage-shards -name 'coverage.lcov' | wc -l) shards"
pnpm exec tsx scripts/merge-lcov.ts temp/coverage-shards coverage/playwright/coverage.lcov
wc -l coverage/playwright/coverage.lcov
- name: Upload merged coverage data

View File

@@ -22,14 +22,16 @@ if (!existsSync(lcovPath)) {
const lcov = readFileSync(lcovPath, 'utf-8')
let totalLines = 0
let coveredLines = 0
let totalFunctions = 0
let coveredFunctions = 0
let totalBranches = 0
let coveredBranches = 0
interface RecordAccum {
lf: number
lh: number
fnf: number
fnh: number
brf: number
brh: number
}
const fileStats = new Map<string, FileStats>()
const fileRecords = new Map<string, RecordAccum>()
let currentFile = ''
for (const line of lcov.split('\n')) {
@@ -37,27 +39,65 @@ for (const line of lcov.split('\n')) {
currentFile = line.slice(3)
} else if (line.startsWith('LF:')) {
const n = parseInt(line.slice(3), 10) || 0
totalLines += n
const entry = fileStats.get(currentFile) ?? { lines: 0, covered: 0 }
entry.lines = n
fileStats.set(currentFile, entry)
const rec = fileRecords.get(currentFile) ?? {
lf: 0,
lh: 0,
fnf: 0,
fnh: 0,
brf: 0,
brh: 0
}
rec.lf = Math.max(rec.lf, n)
fileRecords.set(currentFile, rec)
} else if (line.startsWith('LH:')) {
const n = parseInt(line.slice(3), 10) || 0
coveredLines += n
const entry = fileStats.get(currentFile) ?? { lines: 0, covered: 0 }
entry.covered = n
fileStats.set(currentFile, entry)
const rec = fileRecords.get(currentFile) ?? {
lf: 0,
lh: 0,
fnf: 0,
fnh: 0,
brf: 0,
brh: 0
}
rec.lh = Math.max(rec.lh, n)
fileRecords.set(currentFile, rec)
} else if (line.startsWith('FNF:')) {
totalFunctions += parseInt(line.slice(4), 10) || 0
const n = parseInt(line.slice(4), 10) || 0
const rec = fileRecords.get(currentFile)
if (rec) rec.fnf = Math.max(rec.fnf, n)
} else if (line.startsWith('FNH:')) {
coveredFunctions += parseInt(line.slice(4), 10) || 0
const n = parseInt(line.slice(4), 10) || 0
const rec = fileRecords.get(currentFile)
if (rec) rec.fnh = Math.max(rec.fnh, n)
} else if (line.startsWith('BRF:')) {
totalBranches += parseInt(line.slice(4), 10) || 0
const n = parseInt(line.slice(4), 10) || 0
const rec = fileRecords.get(currentFile)
if (rec) rec.brf = Math.max(rec.brf, n)
} else if (line.startsWith('BRH:')) {
coveredBranches += parseInt(line.slice(4), 10) || 0
const n = parseInt(line.slice(4), 10) || 0
const rec = fileRecords.get(currentFile)
if (rec) rec.brh = Math.max(rec.brh, n)
}
}
let totalLines = 0
let coveredLines = 0
let totalFunctions = 0
let coveredFunctions = 0
let totalBranches = 0
let coveredBranches = 0
const fileStats = new Map<string, FileStats>()
for (const [file, rec] of fileRecords) {
totalLines += rec.lf
coveredLines += rec.lh
totalFunctions += rec.fnf
coveredFunctions += rec.fnh
totalBranches += rec.brf
coveredBranches += rec.brh
fileStats.set(file, { lines: rec.lf, covered: rec.lh })
}
function pct(covered: number, total: number): string {
if (total === 0) return '—'
return ((covered / total) * 100).toFixed(1) + '%'

View File

@@ -19,17 +19,32 @@ interface SlackBlock {
}
function parseLcovContent(content: string): CoverageData | null {
let totalLines = 0
let coveredLines = 0
const perFile = new Map<string, { lf: number; lh: number }>()
let currentFile = ''
for (const line of content.split('\n')) {
if (line.startsWith('LF:')) {
totalLines += parseInt(line.slice(3), 10) || 0
if (line.startsWith('SF:')) {
currentFile = line.slice(3)
} else if (line.startsWith('LF:')) {
const n = parseInt(line.slice(3), 10) || 0
const entry = perFile.get(currentFile) ?? { lf: 0, lh: 0 }
entry.lf = Math.max(entry.lf, n)
perFile.set(currentFile, entry)
} else if (line.startsWith('LH:')) {
coveredLines += parseInt(line.slice(3), 10) || 0
const n = parseInt(line.slice(3), 10) || 0
const entry = perFile.get(currentFile) ?? { lf: 0, lh: 0 }
entry.lh = Math.max(entry.lh, n)
perFile.set(currentFile, entry)
}
}
let totalLines = 0
let coveredLines = 0
for (const { lf, lh } of perFile.values()) {
totalLines += lf
coveredLines += lh
}
if (totalLines === 0) return null
return {

198
scripts/merge-lcov.ts Normal file
View File

@@ -0,0 +1,198 @@
import { execSync } from 'node:child_process'
import {
  existsSync,
  readdirSync,
  readFileSync,
  statSync,
  writeFileSync
} from 'node:fs'
import { basename, join, resolve } from 'node:path'
// Accumulated coverage for one source file (one LCOV SF: record), unioned
// across all shard inputs.
interface FileRecord {
  // line number -> hit count; merged with Math.max across shards
  lines: Map<number, number>
  // keyed by function name (FN/FNDA records); hits merged with Math.max
  functions: Map<string, { name: string; line: number; hits: number }>
  // keyed by "line,block,branch" (BRDA records); hits merged with Math.max
  branches: Map<
    string,
    { line: number; block: number; branch: number; hits: number }
  >
}
/**
 * Return the accumulator record registered for a source path, creating and
 * registering an empty one the first time the path is seen.
 */
function getOrCreateRecord(
  files: Map<string, FileRecord>,
  sf: string
): FileRecord {
  const existing = files.get(sf)
  if (existing) return existing
  const created: FileRecord = {
    lines: new Map(),
    functions: new Map(),
    branches: new Map()
  }
  files.set(sf, created)
  return created
}
/**
 * Parse one or more LCOV files and union their records per source file.
 *
 * Merge semantics (per SF: path):
 *  - DA: line hit counts are unioned; when the same line appears in several
 *    shards the maximum hit count wins, so a line covered by any shard
 *    stays covered.
 *  - FN:/FNDA: functions are keyed by name; hit counts merge via max. An
 *    FNDA without a preceding FN is kept with line 0.
 *  - BRDA: branches are keyed by (line, block, branch); '-' hits mean
 *    "block never executed" and count as 0; hits merge via max.
 *
 * Summary counters (LF/LH/FNF/FNH/BRF/BRH) in the inputs are deliberately
 * ignored — writeLcov recomputes them from the merged data. Missing input
 * paths are skipped silently; malformed DA:/BRDA: records with non-numeric
 * line fields are dropped rather than producing NaN keys.
 */
function parseLcovFiles(paths: string[]): Map<string, FileRecord> {
  const files = new Map<string, FileRecord>()
  for (const filePath of paths) {
    if (!existsSync(filePath)) continue
    // Reset per input file so a shard that is missing its trailing
    // end_of_record cannot leak its last record into the next shard.
    let current: FileRecord | null = null
    const content = readFileSync(filePath, 'utf-8')
    for (const line of content.split('\n')) {
      const trimmed = line.trim()
      if (!trimmed) continue
      if (trimmed.startsWith('SF:')) {
        current = getOrCreateRecord(files, trimmed.slice(3))
      } else if (trimmed === 'end_of_record') {
        current = null
      } else if (current) {
        if (trimmed.startsWith('DA:')) {
          // DA:<line>,<hits>[,<checksum>]
          const parts = trimmed.slice(3).split(',')
          const lineNum = parseInt(parts[0], 10)
          if (Number.isNaN(lineNum)) continue // malformed record
          const hits = parseInt(parts[1], 10) || 0
          const prev = current.lines.get(lineNum) ?? 0
          current.lines.set(lineNum, Math.max(prev, hits))
        } else if (trimmed.startsWith('FN:')) {
          // FN:<line>,<name> — name may itself contain commas
          const parts = trimmed.slice(3).split(',')
          const fnLine = parseInt(parts[0], 10)
          const fnName = parts.slice(1).join(',')
          if (!current.functions.has(fnName)) {
            current.functions.set(fnName, {
              name: fnName,
              line: fnLine,
              hits: 0
            })
          }
        } else if (trimmed.startsWith('FNDA:')) {
          // FNDA:<hits>,<name>
          const parts = trimmed.slice(5).split(',')
          const hits = parseInt(parts[0], 10) || 0
          const fnName = parts.slice(1).join(',')
          const fn = current.functions.get(fnName)
          if (fn) {
            fn.hits = Math.max(fn.hits, hits)
          } else {
            // FNDA before/without FN: keep it, with an unknown start line.
            current.functions.set(fnName, { name: fnName, line: 0, hits })
          }
        } else if (trimmed.startsWith('BRDA:')) {
          // BRDA:<line>,<block>,<branch>,<hits|'-'>
          const parts = trimmed.slice(5).split(',')
          const brLine = parseInt(parts[0], 10)
          const block = parseInt(parts[1], 10)
          const branch = parseInt(parts[2], 10)
          if (
            Number.isNaN(brLine) ||
            Number.isNaN(block) ||
            Number.isNaN(branch)
          ) {
            continue // malformed record
          }
          const hits = parts[3] === '-' ? 0 : parseInt(parts[3], 10) || 0
          const key = `${brLine},${block},${branch}`
          const prev = current.branches.get(key)
          if (prev) {
            prev.hits = Math.max(prev.hits, hits)
          } else {
            current.branches.set(key, {
              line: brLine,
              block,
              branch,
              hits
            })
          }
        }
      }
    }
  }
  return files
}
/**
 * Serialize merged records back to LCOV text, sorted by source path so the
 * output is deterministic. All summary counters (FNF/FNH, BRF/BRH, LF/LH)
 * are recomputed from the merged per-record data, never copied from input.
 */
function writeLcov(files: Map<string, FileRecord>): string {
  const bySource = [...files.entries()].sort(([a], [b]) => a.localeCompare(b))
  const out: string[] = []
  for (const [sf, rec] of bySource) {
    out.push(`SF:${sf}`)
    const fns = [...rec.functions.values()]
    for (const fn of fns) out.push(`FN:${fn.line},${fn.name}`)
    for (const fn of fns) out.push(`FNDA:${fn.hits},${fn.name}`)
    out.push(`FNF:${fns.length}`)
    out.push(`FNH:${fns.filter(fn => fn.hits > 0).length}`)
    const brs = [...rec.branches.values()]
    for (const br of brs) {
      // LCOV uses '-' for a branch whose enclosing block never executed.
      const hitField = br.hits === 0 ? '-' : br.hits
      out.push(`BRDA:${br.line},${br.block},${br.branch},${hitField}`)
    }
    out.push(`BRF:${brs.length}`)
    out.push(`BRH:${brs.filter(br => br.hits > 0).length}`)
    const lineEntries = [...rec.lines.entries()].sort(([a], [b]) => a - b)
    for (const [lineNum, hits] of lineEntries) out.push(`DA:${lineNum},${hits}`)
    out.push(`LF:${lineEntries.length}`)
    out.push(`LH:${lineEntries.filter(([, hits]) => hits > 0).length}`)
    out.push('end_of_record')
  }
  return out.join('\n') + '\n'
}
/**
 * CLI entry point: merge-lcov.ts <input-dir> <output-file>
 *
 * Recursively locates every coverage.lcov under <input-dir>, merges them
 * with parseLcovFiles, and writes the combined LCOV to <output-file>.
 * Diagnostics go to stderr so stdout stays clean for pipelines.
 *
 * The tree walk is done in-process (readdirSync recursive) instead of
 * shelling out to `find`: the old execSync + JSON.stringify quoting left
 * `$`, backticks and backslashes in the path exposed to shell expansion,
 * and did not work on Windows.
 */
function main() {
  const inputDir = process.argv[2]
  const outputFile = process.argv[3]
  if (!inputDir || !outputFile) {
    console.error('Usage: merge-lcov.ts <input-dir> <output-file>')
    console.error(
      '  Finds all coverage.lcov files under <input-dir> and merges them.'
    )
    process.exit(1)
  }
  const root = resolve(inputDir)
  // Sorted for deterministic input order; statSync guards against a
  // (pathological) directory named coverage.lcov.
  const lcovFiles = readdirSync(root, { recursive: true })
    .map(rel => join(root, String(rel)))
    .filter(p => basename(p) === 'coverage.lcov' && statSync(p).isFile())
    .sort()
  if (lcovFiles.length === 0) {
    console.error('No coverage.lcov files found under', inputDir)
    // Still produce an (empty) output so downstream steps find the file.
    writeFileSync(outputFile, '')
    process.exit(0)
  }
  console.error(`Merging ${lcovFiles.length} shard LCOV files...`)
  const merged = parseLcovFiles(lcovFiles)
  const output = writeLcov(merged)
  writeFileSync(outputFile, output)
  // Recompute headline numbers for the log line from the merged data.
  let totalFiles = 0
  let totalLines = 0
  let coveredLines = 0
  for (const rec of merged.values()) {
    totalFiles++
    totalLines += rec.lines.size
    for (const hits of rec.lines.values()) {
      if (hits > 0) coveredLines++
    }
  }
  console.error(
    `Merged: ${totalFiles} source files, ${coveredLines}/${totalLines} lines covered`
  )
}
main()