Grouped GEMM for fp16 (#126)

* init of grouped_gemm

* 2 gemm test

* perf test

* clean

* wrap desc into a struct

* test cast static_arr to pointer

* add ptr to GemmDesc

* add grouped gemm profiler

* fixed mem issue with unique_ptr

* clean

* clean

* finished ckprofiler

* Update README.md

* readme

* fixed readme

* add example

* improve code

* fixed comments: reserve, separate ptr and gemm_shapes

* merge group and non-group

* fixed comments: replace push_back with emplace_back to avoid copy constructor

* fixed comments: unified blk2ctile; add test

* ci fix

* fixed ci

* fixed ci

* fixed ci
This commit is contained in:
zjing14
2022-03-22 18:18:18 -05:00
committed by GitHub
parent 9a8ee8a39a
commit 716f1c7fb1
20 changed files with 1917 additions and 0 deletions

View File

@@ -15,9 +15,11 @@ int profile_conv_fwd_bias_relu_add(int, char*[]);
int profile_conv_fwd_bias_relu_atomic_add(int, char*[]);
int profile_conv_bwd_data(int, char*[]);
int profile_reduce(int, char*[]);
int profile_grouped_gemm(int, char*[]);
int main(int argc, char* argv[])
{
#if 0
if(strcmp(argv[1], "gemm") == 0)
{
return profile_gemm(argc, argv);
@@ -62,6 +64,10 @@ int main(int argc, char* argv[])
{
return profile_reduce(argc, argv);
}
else if(strcmp(argv[1], "grouped_gemm") == 0)
{
return profile_grouped_gemm(argc, argv);
}
else
{
// clang-format off
@@ -74,9 +80,13 @@ int main(int argc, char* argv[])
" conv_fwd_bias_relu_add: ForwardConvolution+Bias+ReLU+Add\n"
" conv_fwd_bias_relu_atomic_add: ForwardConvolution+Bias+ReLU+AtomicAdd\n"
" conv_bwd: BackwardConvolution\n"
" grouped_gemm: Grouped Gemm\n"
" reduce: REDUCE\n");
// clang-format on
return 0;
}
#else
profile_grouped_gemm(argc, argv);
#endif
}