
Commit d707261

ggml_graph_dup force_grads

1 parent 1c4f91e

3 files changed: 5 additions & 5 deletions

ggml/include/ggml.h

Lines changed: 1 addition & 1 deletion
@@ -2002,7 +2002,7 @@ extern "C" {
     // graph allocation in a context
     GGML_API struct ggml_cgraph * ggml_new_graph        (struct ggml_context * ctx); // size = GGML_DEFAULT_GRAPH_SIZE, grads = false
     GGML_API struct ggml_cgraph * ggml_new_graph_custom (struct ggml_context * ctx, size_t size, bool grads);
-    GGML_API struct ggml_cgraph * ggml_graph_dup        (struct ggml_context * ctx, struct ggml_cgraph * cgraph);
+    GGML_API struct ggml_cgraph * ggml_graph_dup        (struct ggml_context * ctx, struct ggml_cgraph * cgraph, bool force_grads);
     GGML_API void                 ggml_graph_cpy        (struct ggml_cgraph * src, struct ggml_cgraph * dst);
     GGML_API void                 ggml_graph_reset      (struct ggml_cgraph * cgraph); // set regular grads + optimizer momenta to 0, set loss grad to 1
     GGML_API void                 ggml_graph_clear      (struct ggml_cgraph * cgraph);
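
For callers outside ggml-opt this is a breaking signature change: every existing ggml_graph_dup call site gains a third argument. A minimal usage sketch under the patched header (hypothetical caller code, not part of the commit):

#include "ggml.h"

int main(void) {
    struct ggml_init_params params = {
        /*.mem_size   =*/ 16*1024*1024,
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ false,
    };
    struct ggml_context * ctx = ggml_init(params);

    // Forward graph built without gradient storage (grads = false).
    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    struct ggml_tensor * a  = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 8);
    struct ggml_tensor * b  = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 8);
    ggml_build_forward_expand(gf, ggml_add(ctx, a, b));

    // force_grads = true: the copy gets gradient storage even though
    // gf was created without it (e.g. to append a backward pass).
    struct ggml_cgraph * gb = ggml_graph_dup(ctx, gf, /*force_grads =*/ true);

    // force_grads = false: the copy mirrors gf, so no gradient storage here.
    struct ggml_cgraph * gf2 = ggml_graph_dup(ctx, gf, /*force_grads =*/ false);

    (void) gb; (void) gf2;
    ggml_free(ctx);
    return 0;
}

Passing false preserves the source graph's behavior: the copy carries gradient storage only if the original does.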

ggml/src/ggml-opt.cpp

Lines changed: 2 additions & 2 deletions
@@ -486,7 +486,7 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx, const enum ggml_opt_build
     }
 
     // gb_grad == graph backward gradients, forward pass, then backward pass to calculate gradients.
-    opt_ctx->gb_grad = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gf);
+    opt_ctx->gb_grad = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gf, /*force_grads =*/ true);
     ggml_build_backward_expand(opt_ctx->ctx_compute, opt_ctx->gb_grad, opt_ctx->grad_accs.data());
 
     if (build_type == GGML_OPT_BUILD_TYPE_GRAD) {
@@ -500,7 +500,7 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx, const enum ggml_opt_build
     GGML_ASSERT(build_type == GGML_OPT_BUILD_TYPE_OPT);
 
     // gb_opt == graph backward optimize, forward pass, then backward pass to calculate gradients, then optimizer step.
-    opt_ctx->gb_opt = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gb_grad);
+    opt_ctx->gb_opt = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gb_grad, /*force_grads =*/ true);
 
     opt_ctx->adamw_params = ggml_new_tensor_1d(opt_ctx->ctx_static_cpu, GGML_TYPE_F32, 7);
     ggml_set_input(opt_ctx->adamw_params);
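
For context, both hunks sit in ggml_opt_build()'s graph pipeline: the user-built forward graph gf is duplicated into gb_grad (forward + backward) and then into gb_opt (forward + backward + optimizer step). Since gf is typically created with grads = false, both duplicates force gradient storage on. A condensed sketch (not verbatim; intermediate code elided, using only calls visible in the diff):

// gf: user-built forward graph, usually without gradient storage.

// gb_grad: forward pass + backward pass; grads must be forced on so that
// ggml_build_backward_expand() has somewhere to put the gradients.
opt_ctx->gb_grad = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gf, /*force_grads =*/ true);
ggml_build_backward_expand(opt_ctx->ctx_compute, opt_ctx->gb_grad, opt_ctx->grad_accs.data());

// gb_opt: gb_grad plus the optimizer step appended afterwards.
opt_ctx->gb_opt = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gb_grad, /*force_grads =*/ true);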

ggml/src/ggml.c

Lines changed: 2 additions & 2 deletions
@@ -5834,8 +5834,8 @@ void ggml_graph_cpy(struct ggml_cgraph * src, struct ggml_cgraph * dst) {
     }
 }
 
-struct ggml_cgraph * ggml_graph_dup(struct ggml_context * ctx, struct ggml_cgraph * cgraph) {
-    struct ggml_cgraph * result = ggml_new_graph_custom(ctx, cgraph->size, true); // FIXME
+struct ggml_cgraph * ggml_graph_dup(struct ggml_context * ctx, struct ggml_cgraph * cgraph, bool force_grads) {
+    struct ggml_cgraph * result = ggml_new_graph_custom(ctx, cgraph->size, cgraph->grads || force_grads);
     ggml_graph_cpy(cgraph, result);
     return result;
 }
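
The body change replaces the unconditional true (flagged FIXME) with cgraph->grads || force_grads, so a duplicate now allocates gradient storage only when the source graph already has it or the caller explicitly requests it. A stand-alone restatement of the rule (hypothetical helper, not in ggml):

#include <stdbool.h>

// Gradient-allocation rule for graph duplicates after this commit.
static bool dup_allocates_grads(bool src_has_grads, bool force_grads) {
    return src_has_grads || force_grads;  // before this commit: always true (FIXME)
}

Note that under this rule the gb_opt duplicate in ggml-opt.cpp would inherit gradient storage from gb_grad even without forcing; passing true there makes the intent explicit.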
