From 1b5528788fe81fed31818a8d8a33eb8d2879d973 Mon Sep 17 00:00:00 2001
From: Laurent
Date: Sat, 25 Jan 2025 23:28:36 +0100
Subject: [PATCH] Fix some compilation issues.

---
 candle-metal-kernels/src/scaled_dot_product_attention.metal | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/candle-metal-kernels/src/scaled_dot_product_attention.metal b/candle-metal-kernels/src/scaled_dot_product_attention.metal
index 0453e0d11..ab129d13a 100644
--- a/candle-metal-kernels/src/scaled_dot_product_attention.metal
+++ b/candle-metal-kernels/src/scaled_dot_product_attention.metal
@@ -1404,7 +1404,7 @@ instantiate_fast_inference_self_attention_kernel(half, half, 16, 16, 256, 2, 2);
     const constant size_t& v_stride, \
     const constant float& scale, \
     const constant float& softcapping, \
-    const device bool* mask [[function_constant(sdpa_vector_has_mask)]],, \
+    const device bool* mask [[function_constant(sdpa_vector_has_mask)]], \
     const constant int& mask_seq_stride [[function_constant(sdpa_vector_has_mask)]], \
     const constant int& mask_head_stride [[function_constant(sdpa_vector_has_mask)]], \
     uint3 tid [[threadgroup_position_in_grid]], \
@@ -1424,7 +1424,7 @@ instantiate_fast_inference_self_attention_kernel(half, half, 16, 16, 256, 2, 2);
     const constant size_t& v_stride, \
    const constant float& scale, \
     const constant float& softcapping, \
-    const device bool* mask [[function_constant(sdpa_vector_has_mask)]],, \
+    const device bool* mask [[function_constant(sdpa_vector_has_mask)]], \
     const constant int& mask_seq_stride [[function_constant(sdpa_vector_has_mask)]], \
     const constant int& mask_head_stride [[function_constant(sdpa_vector_has_mask)]], \
     uint3 tid [[threadgroup_position_in_grid]], \
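
Note on the fix: the stray doubled comma after the mask argument in these macro parameter lists expands to an empty parameter in the generated kernel signature, which the Metal front end (C++-based) rejects. Below is a minimal C++ sketch of that preprocessor behavior; the macro and function names (DECLARE_PARAMS_BAD, DECLARE_PARAMS_FIXED, demo_kernel) are hypothetical and not from the repository, they only illustrate why removing one comma resolves the build error.

// Minimal C++ sketch of why ",," in a macro-generated parameter list
// fails to compile. Names here are hypothetical illustrations only.
#define DECLARE_PARAMS_BAD(name)                                        \
  void name(const float* q,                                             \
            const bool* mask,, /* stray comma: empty parameter slot */  \
            int mask_seq_stride);

#define DECLARE_PARAMS_FIXED(name)                                      \
  void name(const float* q,                                             \
            const bool* mask, /* single comma: valid parameter list */  \
            int mask_seq_stride);

// DECLARE_PARAMS_BAD(demo_kernel)   // error: expected parameter declarator
DECLARE_PARAMS_FIXED(demo_kernel)    // compiles cleanly

int main() { return 0; }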