ollama/llama/ggml-cuda/template-instances/fattn-wmma-f16-instance-kqfloat-cpb32.cu
// This file has been autogenerated by generate_cu_files.py, do not edit manually.
#include "../fattn-wmma-f16.cuh"
DECL_FATTN_WMMA_F16_CASE(64, 32, float);
DECL_FATTN_WMMA_F16_CASE(80, 32, float);
DECL_FATTN_WMMA_F16_CASE(96, 32, float);
DECL_FATTN_WMMA_F16_CASE(112, 32, float);
DECL_FATTN_WMMA_F16_CASE(128, 32, float);