
Commit 4fa3b23 (1 parent: 3b0df03)

tests : add non-cont tests

ggml-ci


tests/test-backend-ops.cpp

Lines changed: 37 additions & 27 deletions
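
The commit extends test_rope so that every ROPE configuration is also run with a non-contiguous input tensor: a new v parameter, when set, allocates a padded parent tensor and takes a strided view of it as the rope input.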
@@ -1138,27 +1138,35 @@ struct test_soft_max : public test_case {
 // GGML_OP_ROPE
 struct test_rope : public test_case {
     const ggml_type type;
-    const std::array<int64_t, 4> ne;
+    const std::array<int64_t, 4> ne_a;
     int n_dims;
     int mode;
     int n_ctx;
     float fs; // freq_scale
     float ef; // ext_factor
     float af; // attn_factor
     bool ff;
+    int v; // view (1 : non-contiguous a)
 
     std::string vars() override {
-        return VARS_TO_STR9(type, ne, n_dims, mode, n_ctx, fs, ef, af, ff);
+        return VARS_TO_STR10(type, ne_a, n_dims, mode, n_ctx, fs, ef, af, ff, v);
     }
 
     test_rope(ggml_type type = GGML_TYPE_F32,
-            std::array<int64_t, 4> ne = {10, 10, 10, 1},
-            int n_dims = 10, int mode = 0, int n_ctx = 512, float fs = 1.0f, float ef = 0.0f, float af = 0.0f, bool ff = false)
-        : type(type), ne(ne), n_dims(n_dims), mode(mode), n_ctx(n_ctx), fs(fs), ef(ef), af(af), ff(ff) {}
+            std::array<int64_t, 4> ne_a = {10, 10, 10, 1},
+            int n_dims = 10, int mode = 0, int n_ctx = 512, float fs = 1.0f, float ef = 0.0f, float af = 0.0f, bool ff = false, int v = 0)
+        : type(type), ne_a(ne_a), n_dims(n_dims), mode(mode), n_ctx(n_ctx), fs(fs), ef(ef), af(af), ff(ff), v(v) {}
 
     ggml_tensor * build_graph(ggml_context * ctx) override {
-        ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data());
-        ggml_tensor * pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, ne[2]);
+        ggml_tensor * a;
+        if (v & 1) {
+            auto ne = ne_a; ne[0] *= 2; ne[1] *= 4; ne[2] *= 3;
+            a = ggml_new_tensor(ctx, type, 4, ne.data());
+            a = ggml_view_4d(ctx, a, ne_a[0], ne_a[1], ne_a[2], ne_a[3], a->nb[1], a->nb[2], a->nb[3], 0);
+        } else {
+            a = ggml_new_tensor(ctx, type, 4, ne_a.data());
+        }
+        ggml_tensor * pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, ne_a[2]);
         ggml_tensor * freq = ff ? ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_dims/2) : nullptr;
         ggml_tensor * out = ggml_rope_ext(ctx, a, pos, freq, n_dims, mode, n_ctx, 0, 10000.0f, fs, ef, af, 1.0f, 1.0f);
         return out;
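
For reference: the view taken in the v & 1 branch is non-contiguous because the parent is allocated with padded extents (ne[0] *= 2, ne[1] *= 4, ne[2] *= 3) while the view keeps the parent's byte strides nb[1..3] for the original logical shape, so consecutive rows and planes of the view are separated by padding in memory. Below is a minimal standalone sketch of the same construction; it is not part of the commit and assumes only the public ggml.h API, with sizes mirroring the multipliers used in build_graph above.

// Standalone sketch (assumption: plain ggml.h, not part of the test file).
// Demonstrates that a view with the padded parent's strides is non-contiguous.
#include "ggml.h"

#include <cstdio>

int main() {
    ggml_init_params params = {
        /* .mem_size   = */ 16*1024*1024,
        /* .mem_buffer = */ nullptr,
        /* .no_alloc   = */ false,
    };
    ggml_context * ctx = ggml_init(params);

    // padded parent: 2x/4x/3x the logical extents, as in build_graph()
    ggml_tensor * big = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, 2*10, 4*10, 3*10, 1);

    // view with the logical shape {10, 10, 10, 1} but the parent's byte strides:
    // rows of the view are 20 floats apart even though only 10 are in use
    ggml_tensor * view = ggml_view_4d(ctx, big, 10, 10, 10, 1,
                                      big->nb[1], big->nb[2], big->nb[3], 0);

    printf("contiguous: %d\n", ggml_is_contiguous(view) ? 1 : 0); // prints 0

    ggml_free(ctx);
    return 0;
}

With v = 0 the tensor is allocated directly from ne_a and stays contiguous, so the same configuration is exercised with both contiguous and strided inputs.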
@@ -1168,11 +1176,11 @@ struct test_rope : public test_case {
         for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) {
             if (t->type == GGML_TYPE_I32) {
                 // pos
-                std::vector<int> data(ne[2]);
-                for (int i = 0; i < ne[2]; i++) {
+                std::vector<int> data(ne_a[2]);
+                for (int i = 0; i < ne_a[2]; i++) {
                     data[i] = rand() % n_ctx;
                 }
-                ggml_backend_tensor_set(t, data.data(), 0, ne[2] * sizeof(int));
+                ggml_backend_tensor_set(t, data.data(), 0, ne_a[2] * sizeof(int));
             } else {
                 if (t->ne[0] == n_dims/2) {
                     // frequency factors in the range [0.9f, 1.1f]
@@ -2201,23 +2209,25 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op
     test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 0.0f));
     test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 8.0f));
 
-    for (float fs : { 1.0f, 1.4245f }) {
-        for (float ef : { 0.0f, 0.7465f }) {
-            for (float af : { 1.0f, 1.4245f }) {
-                for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) {
-                    // TODO: ff not supported yet for !neox
-                    test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512, fs, ef, af, false)); // llama 7B
-                    test_cases.emplace_back(new test_rope(type, {128, 40, 10, 1}, 128, 0, 512, fs, ef, af, false)); // llama 13B
-                    test_cases.emplace_back(new test_rope(type, {128, 52, 10, 1}, 128, 0, 512, fs, ef, af, false)); // llama 30B
-                    test_cases.emplace_back(new test_rope(type, {128, 64, 10, 1}, 128, 0, 512, fs, ef, af, false)); // llama 65B
-
-                    for (bool ff : {false, true}) { // freq_factors
-                        test_cases.emplace_back(new test_rope(type, { 64,   1, 10, 1},  64, 2, 512, fs, ef, af, ff)); // neox (falcon 7B)
-                        test_cases.emplace_back(new test_rope(type, { 64,  71, 10, 1},  64, 2, 512, fs, ef, af, ff)); // neox (falcon 7B)
-                        test_cases.emplace_back(new test_rope(type, { 64,   8, 10, 1},  64, 2, 512, fs, ef, af, ff)); // neox (falcon 40B)
-                        test_cases.emplace_back(new test_rope(type, { 64, 128, 10, 1},  64, 2, 512, fs, ef, af, ff)); // neox (falcon 40B)
-                        test_cases.emplace_back(new test_rope(type, { 80,  32, 10, 1},  20, 2, 512, fs, ef, af, ff)); // neox (stablelm)
-                        test_cases.emplace_back(new test_rope(type, { 80,  32, 10, 1},  32, 2, 512, fs, ef, af, ff)); // neox (phi-2)
+    for (float v : { 0, 1 }) {
+        for (float fs : { 1.0f, 1.4245f }) {
+            for (float ef : { 0.0f, 0.7465f }) {
+                for (float af : { 1.0f, 1.4245f }) {
+                    for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) {
+                        // TODO: ff not supported yet for !neox
+                        test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512, fs, ef, af, false, v)); // llama 7B
+                        test_cases.emplace_back(new test_rope(type, {128, 40, 10, 1}, 128, 0, 512, fs, ef, af, false, v)); // llama 13B
+                        test_cases.emplace_back(new test_rope(type, {128, 52, 10, 1}, 128, 0, 512, fs, ef, af, false, v)); // llama 30B
+                        test_cases.emplace_back(new test_rope(type, {128, 64, 10, 1}, 128, 0, 512, fs, ef, af, false, v)); // llama 65B
+
+                        for (bool ff : {false, true}) { // freq_factors
+                            test_cases.emplace_back(new test_rope(type, { 64,   1, 10, 1},  64, 2, 512, fs, ef, af, ff, v)); // neox (falcon 7B)
+                            test_cases.emplace_back(new test_rope(type, { 64,  71, 10, 1},  64, 2, 512, fs, ef, af, ff, v)); // neox (falcon 7B)
+                            test_cases.emplace_back(new test_rope(type, { 64,   8, 10, 1},  64, 2, 512, fs, ef, af, ff, v)); // neox (falcon 40B)
+                            test_cases.emplace_back(new test_rope(type, { 64, 128, 10, 1},  64, 2, 512, fs, ef, af, ff, v)); // neox (falcon 40B)
+                            test_cases.emplace_back(new test_rope(type, { 80,  32, 10, 1},  20, 2, 512, fs, ef, af, ff, v)); // neox (stablelm)
+                            test_cases.emplace_back(new test_rope(type, { 80,  32, 10, 1},  32, 2, 512, fs, ef, af, ff, v)); // neox (phi-2)
+                        }
                     }
                 }
             }
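
The new outermost v loop doubles the ROPE coverage: each combination of v, fs, ef, af and type (two values each) contributes 4 llama-style cases plus 2 x 6 neox cases, i.e. 16 cases, so the matrix grows from 2^4 x 16 = 256 to 2^5 x 16 = 512 rope test cases, half of them with non-contiguous inputs.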
