diff --git a/Jenkinsfile b/Jenkinsfile index aea14c78b6..2a1d1fd904 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -72,10 +72,8 @@ def sendFailureNotifications() { } } -def generateAndArchiveBuildTraceVisualization() { +def generateAndArchiveBuildTraceVisualization(String buildTraceFileName) { try { - def buildTraceFileName = "ck_build_trace.json"; - // Attempt to download the build trace file to check if it exists def traceFileExists = false try { @@ -628,15 +626,17 @@ def cmake_build(Map conf=[:]){ sh cmd //run tests except when NO_CK_BUILD or BUILD_LEGACY_OS are set if(!setup_args.contains("NO_CK_BUILD") && !params.BUILD_LEGACY_OS){ - if ((setup_args.contains("gfx9") && params.NINJA_BUILD_TRACE) || params.BUILD_INSTANCES_ONLY){ + sh "python3 ../script/ninja_json_converter.py .ninja_log --legacy-format --output ck_build_trace_${check_arch_name()}.json" + archiveArtifacts "ck_build_trace_${check_arch_name()}.json" + sh "python3 ../script/parse_ninja_trace.py ck_build_trace_${check_arch_name()}.json" + if (params.NINJA_BUILD_TRACE || params.BUILD_INSTANCES_ONLY){ if (params.NINJA_FTIME_TRACE) { - echo "running ninja ftime trace" + echo "running ClangBuildAnalyzer" sh "/ClangBuildAnalyzer/build/ClangBuildAnalyzer --all . 
clang_build.log" - sh "/ClangBuildAnalyzer/build/ClangBuildAnalyzer --analyze clang_build.log > clang_build_analysis.log" - archiveArtifacts "clang_build_analysis.log" + sh "/ClangBuildAnalyzer/build/ClangBuildAnalyzer --analyze clang_build.log > clang_build_analysis_${check_arch_name()}.log" + archiveArtifacts "clang_build_analysis_${check_arch_name()}.log" } - sh "python3 ../script/ninja_json_converter.py .ninja_log --legacy-format --output ck_build_trace.json" - archiveArtifacts "ck_build_trace.json" + // do not run unit tests when building instances only if(!params.BUILD_INSTANCES_ONLY){ @@ -652,9 +652,8 @@ def cmake_build(Map conf=[:]){ if(params.BUILD_PACKAGES){ echo "Build ckProfiler packages" sh 'ninja -j64 package' - def arch_name = check_arch_name() - sh "mv composablekernel-ckprofiler_*.deb composablekernel-ckprofiler_1.2.0_amd64_${arch_name}.deb" - stash includes: "composablekernel-ckprofiler**.deb", name: "profiler_package_${arch_name}" + sh "mv composablekernel-ckprofiler_*.deb composablekernel-ckprofiler_1.2.0_amd64_${check_arch_name()}.deb" + stash includes: "composablekernel-ckprofiler**.deb", name: "profiler_package_${check_arch_name()}" } } if(params.BUILD_INSTANCES_ONLY){ @@ -680,9 +679,8 @@ def cmake_build(Map conf=[:]){ if(params.BUILD_PACKAGES){ echo "Build ckProfiler packages" sh 'ninja -j64 package' - def arch_name = check_arch_name() - sh "mv composablekernel-ckprofiler_*.deb composablekernel-ckprofiler_1.2.0_amd64_${arch_name}.deb" - stash includes: "composablekernel-ckprofiler**.deb", name: "profiler_package_${arch_name}" + sh "mv composablekernel-ckprofiler_*.deb composablekernel-ckprofiler_1.2.0_amd64_${check_arch_name()}.deb" + stash includes: "composablekernel-ckprofiler**.deb", name: "profiler_package_${check_arch_name()}" } } } @@ -1887,7 +1885,11 @@ pipeline { node(rocmnode("nogpu")) { script { // Simulate capture - generateAndArchiveBuildTraceVisualization() + generateAndArchiveBuildTraceVisualization("ck_build_trace_gfx11.json") 
+ generateAndArchiveBuildTraceVisualization("ck_build_trace_gfx12.json") + generateAndArchiveBuildTraceVisualization("ck_build_trace_gfx90a.json") + generateAndArchiveBuildTraceVisualization("ck_build_trace_gfx942.json") + generateAndArchiveBuildTraceVisualization("ck_build_trace_gfx950.json") } cleanWs() } diff --git a/script/dependency-parser/src/enhanced_ninja_parser.py b/script/dependency-parser/src/enhanced_ninja_parser.py index 2ac8e8537a..ebcd878915 100644 --- a/script/dependency-parser/src/enhanced_ninja_parser.py +++ b/script/dependency-parser/src/enhanced_ninja_parser.py @@ -99,7 +99,7 @@ class EnhancedNinjaDependencyParser: print("No object files found - skipping dependency extraction") return - max_workers = min(16, len(object_files)) # Limit concurrent processes + max_workers = min(128, len(object_files)) # Limit concurrent processes with ThreadPoolExecutor(max_workers=max_workers) as executor: # Submit all object files for processing diff --git a/script/parse_ninja_trace.py b/script/parse_ninja_trace.py new file mode 100755 index 0000000000..1706214f49 --- /dev/null +++ b/script/parse_ninja_trace.py @@ -0,0 +1,43 @@ +# Copyright (c) Advanced Micro Devices, Inc., or its affiliates. 
# SPDX-License-Identifier: MIT
"""Scan a ninja build trace (Chrome-trace-style JSON) and flag slow compiles.

Each trace entry is expected to be a dict with a ``name`` and a ``dur``
field; ``dur`` is assumed to be in microseconds (based on the 60 000 000
divisor used to convert to minutes — TODO confirm against the output of
script/ninja_json_converter.py).
"""

import json
import os
import sys

# Microseconds per minute; trace "dur" values are assumed to be microseconds.
_US_PER_MINUTE = 60_000_000

# Compilations longer than this many minutes are reported.
_THRESHOLD_MINUTES = 15


def read_json_file(file_path):
    """Load and return the JSON document stored at *file_path*.

    Args:
        file_path: Path to a JSON file on disk.

    Returns:
        The deserialized JSON value (typically a list of trace entries).

    Raises:
        FileNotFoundError: If *file_path* does not name an existing regular file.
        json.JSONDecodeError: If the file contents are not valid JSON.
    """
    if not os.path.isfile(file_path):
        raise FileNotFoundError(f"File not found: {file_path}")

    with open(file_path, "r", encoding="utf-8") as file:
        # json.load already raises json.JSONDecodeError on malformed input,
        # so no catch-and-rewrap is needed to preserve the exception type.
        return json.load(file)


if __name__ == "__main__":
    if len(sys.argv) != 2:
        # Usage text: the placeholder and script name were previously garbled
        # ("parse_json.py " with no argument shown).
        print("Usage: python parse_ninja_trace.py <json_file_path>")
        sys.exit(1)

    json_file_path = sys.argv[1]

    try:
        parsed_data = read_json_file(json_file_path)
        print("JSON parsed successfully!")
        for entry in parsed_data:
            duration_minutes = entry["dur"] / _US_PER_MINUTE
            if duration_minutes > _THRESHOLD_MINUTES:
                print(
                    f"build duration of {entry['name']} exceeds "
                    f"{_THRESHOLD_MINUTES} minutes! actual build time: "
                    f"{duration_minutes:.2f} minutes!"
                )
    except FileNotFoundError as fnf_err:
        print(f"Error: {fnf_err}")
    except json.JSONDecodeError as json_err:
        print(f"Error: {json_err}")
    except Exception as e:
        # NOTE(review): all errors are reported but the exit code stays 0 —
        # confirm this non-blocking behavior is intended for the CI pipeline.
        print(f"Unexpected error: {e}")