Test transparent huge pages on Linux (#278)

* Adding ability to use THP on Linux

* Use the actual page size used for mmap also in munmap

* Add -thp to llama-bench

---------

Co-authored-by: Iwan Kawrakow <iwan.kawrakow@gmail.com>
This commit is contained in:
Kawrakow
2025-03-23 07:24:43 +01:00
committed by GitHub
parent 37c48feb3e
commit 79a105d8ab
5 changed files with 99 additions and 13 deletions

View File

@@ -993,6 +993,10 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa
params.use_mmap = false;
return true;
}
if (arg == "-thp" || arg == "--transparent-huge-pages") {
params.use_thp = true;
return true;
}
if (arg == "--numa") {
CHECK_ARG
std::string value(argv[i]);
@@ -2316,6 +2320,7 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params &
mparams.use_mlock = params.use_mlock;
mparams.check_tensors = params.check_tensors;
mparams.repack_tensors = params.repack_tensors;
mparams.use_thp = params.use_thp;
if (params.kv_overrides.empty()) {
mparams.kv_overrides = NULL;
} else {
@@ -3371,6 +3376,7 @@ void yaml_dump_non_result_info(FILE * stream, const gpt_params & params, const l
fprintf(stream, "n_probs: %d # only used by server binary, default: 0\n", sparams.n_probs);
fprintf(stream, "no_mmap: %s # default: false\n", !params.use_mmap ? "true" : "false");
fprintf(stream, "repack: %s # default: false\n", params.repack_tensors ? "true" : "false");
fprintf(stream, "use_thp: %s # default: false\n", params.use_thp ? "true" : "false");
fprintf(stream, "penalize_nl: %s # default: false\n", sparams.penalize_nl ? "true" : "false");
fprintf(stream, "ppl_output_type: %d # default: 0\n", params.ppl_output_type);
fprintf(stream, "ppl_stride: %d # default: 0\n", params.ppl_stride);