Commit 4b1e676

cuda : fix array size + indents
ggml-ci
1 parent 23f5299 commit 4b1e676

2 files changed: +3 additions, −3 deletions

ggml-cuda/rope.cu

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 #include "rope.cuh"
 
 struct rope_corr_dims {
-    float v[4]; // TODO: is there any reson for this to be 4 instead of 2?
+    float v[2];
 };
 
 static __device__ float rope_yarn_ramp(const float low, const float high, const int i0) {
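
The shrink from v[4] to v[2] matches how the struct is consumed: the CUDA rope kernels only ever read a low/high pair of correction-dimension bounds, which they hand to rope_yarn_ramp. A minimal usage sketch follows; the ramp body and the corr_dims.v[0]/v[1] call site are assumptions based on the YaRN ramp formulation, not a verbatim copy of the kernel:

struct rope_corr_dims {
    float v[2]; // v[0] = low cutoff dimension, v[1] = high cutoff dimension
};

// Assumed ramp implementation: a linear ramp between the two cutoff
// dimensions, clamped to [0, 1].
static __device__ float rope_yarn_ramp(const float low, const float high, const int i0) {
    const float y = (i0 / 2 - low) / max(0.001f, high - low);
    return 1.0f - min(1.0f, max(0.0f, y));
}

// Hypothetical call site inside the rope kernel: only the two bounds are
// ever read, so a 4-element array was never needed.
// const float ramp_mix = rope_yarn_ramp(corr_dims.v[0], corr_dims.v[1], i0);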

ggml.c

Lines changed: 2 additions & 2 deletions
@@ -14370,7 +14370,7 @@ static void ggml_compute_forward_rope_f32(
                 const float sin_theta = cache[i0 + 1];
 
                 const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
-                float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
+                float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
 
                 const float x0 = src[0];
                 const float x1 = src[1];
@@ -14499,7 +14499,7 @@ static void ggml_compute_forward_rope_f16(
                 const float sin_theta = cache[i0 + 1];
 
                 const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
-                ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
+                ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
 
                 const float x0 = GGML_FP16_TO_FP32(src[0]);
                 const float x1 = GGML_FP16_TO_FP32(src[1]);
