Create extract_gradient #89

Merged · 5 commits · Jun 29, 2024

src/ChainRules.jl (11 additions & 2 deletions)
@@ -1,7 +1,13 @@
module ChainRulesModule

using ChainRulesCore:
ChainRulesCore, AbstractTangent, NoTangent, ZeroTangent, Tangent, @thunk, canonicalize
ChainRulesCore as CRC,
AbstractTangent,
NoTangent,
ZeroTangent,
Tangent,
@thunk,
canonicalize
using ..OperatorEnumModule: OperatorEnum
using ..NodeModule: AbstractExpressionNode, with_type_parameters, tree_mapreduce
using ..EvaluateModule: eval_tree_array
@@ -11,6 +17,9 @@ struct NodeTangent{T,N<:AbstractExpressionNode{T},A<:AbstractArray{T}} <: AbstractTangent
tree::N
gradient::A
end
function extract_gradient(gradient::NodeTangent, ::AbstractExpressionNode)
return gradient.gradient
end
function Base.:+(a::NodeTangent, b::NodeTangent)
# @assert a.tree == b.tree
return NodeTangent(a.tree, a.gradient + b.gradient)
@@ -19,7 +28,7 @@ Base.:*(a::Number, b::NodeTangent) = NodeTangent(b.tree, a * b.gradient)
Base.:*(a::NodeTangent, b::Number) = NodeTangent(a.tree, a.gradient * b)
Base.zero(::Union{Type{NodeTangent},NodeTangent}) = ZeroTangent()

function ChainRulesCore.rrule(
function CRC.rrule(
::typeof(eval_tree_array),
tree::AbstractExpressionNode,
X::AbstractMatrix,
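
For orientation, a minimal sketch of what the new method does for a plain tree (not part of this PR's diff; it assumes the usual `OperatorEnum`/`Node` setup from the package README). A `NodeTangent` pairs a tree with a flat gradient over that tree's constants, and `extract_gradient` simply unwraps that flat vector:

using DynamicExpressions

operators = OperatorEnum(; binary_operators=[+, *], unary_operators=[cos])
x1 = Node{Float64}(; feature=1)
tree = x1 * 3.0 + cos(x1 + 1.5)  # two constants: 3.0 and 1.5

# A NodeTangent stores d(output)/d(constants) as a flat vector alongside the tree:
tangent = NodeTangent(tree, [1.0, 2.0])
extract_gradient(tangent, tree)  # returns [1.0, 2.0]
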
src/DynamicExpressions.jl (1 addition & 1 deletion)
@@ -61,7 +61,7 @@ import .NodeModule:
OperatorEnum, GenericOperatorEnum, @extend_operators, set_default_variable_names!
@reexport import .EvaluateModule: eval_tree_array, differentiable_eval_tree_array
@reexport import .EvaluateDerivativeModule: eval_diff_tree_array, eval_grad_tree_array
@reexport import .ChainRulesModule: NodeTangent
@reexport import .ChainRulesModule: NodeTangent, extract_gradient
@reexport import .SimplifyModule: combine_operators, simplify_tree!
@reexport import .EvaluationHelpersModule
@reexport import .ExtensionInterfaceModule: node_to_symbolic, symbolic_to_node
src/Expression.jl (14 additions & 0 deletions)
@@ -5,6 +5,7 @@ using DispatchDoctor: @unstable
using ..NodeModule: AbstractExpressionNode, Node
using ..OperatorEnumModule: AbstractOperatorEnum, OperatorEnum
using ..UtilsModule: Undefined
using ..ChainRulesModule: NodeTangent

import ..NodeModule: copy_node, set_node!, count_nodes, tree_mapreduce, constructorof
import ..NodeUtilsModule:
@@ -16,6 +17,7 @@ import ..NodeUtilsModule:
has_constants,
get_constants,
set_constants!
import ..ChainRulesModule: extract_gradient

"""A wrapper for a named tuple to avoid piracy."""
struct Metadata{NT<:NamedTuple}
Expand Down Expand Up @@ -140,6 +142,12 @@ end
function set_constants!(ex::AbstractExpression{T}, constants, refs) where {T}
return error("`set_constants!` function must be implemented for $(typeof(ex)) types.")
end
function extract_gradient(gradient, ex::AbstractExpression)
# Should match `get_constants`
return error(
"`extract_gradient` function must be implemented for $(typeof(ex)) types with $(typeof(gradient)) gradient.",
)
end
function get_contents(ex::AbstractExpression)
return error("`get_contents` function must be implemented for $(typeof(ex)) types.")
end
@@ -263,6 +271,12 @@ end
function set_constants!(ex::Expression{T}, constants, refs) where {T}
return set_constants!(get_tree(ex), constants, refs)
end
function extract_gradient(
gradient::@NamedTuple{tree::NT, metadata::Nothing}, ex::Expression{T,N}
) where {T,N<:AbstractExpressionNode{T},NT<:NodeTangent{T,N}}
# TODO: This messy gradient type is produced by ChainRules. There is probably a better way to do this.
return extract_gradient(gradient.tree, get_tree(ex))
end

import ..StringsModule: string_tree, print_tree

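
The `# Should match get_constants` comment above states the key contract: the extracted gradient is a flat vector index-aligned with the constants returned by `get_constants`. A hedged sketch of that invariant, reusing the expression from the new test further down:

using DynamicExpressions
using DifferentiationInterface: AutoZygote, gradient
using Zygote: Zygote  # loads the Zygote backend for AutoZygote

ex = @parse_expression(x1 + 1.5, binary_operators = [+], variable_names = ["x1"])
d_ex = gradient(ex -> sum(ex(ones(1, 5))), AutoZygote(), ex)

constants, _ = get_constants(ex)     # flat vector of the tree's constants
d_flat = extract_gradient(d_ex, ex)  # ≈ [5.0] here
@assert axes(d_flat) == axes(constants)
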
src/Interfaces.jl (17 additions & 7 deletions)
@@ -153,12 +153,16 @@ ei_components = (
index_constants = "indexes constants in the expression tree" => _check_index_constants,
has_operators = "checks if the expression has operators" => _check_has_operators,
has_constants = "checks if the expression has constants" => _check_has_constants,
get_constants = "gets constants from the expression tree" => _check_get_constants,
set_constants! = "sets constants in the expression tree" => _check_set_constants!,
get_constants = ("gets constants from the expression tree, returning a tuple of: " *
"(1) a flat vector of the constants, and (2) an reference object that " *
"can be used by `set_constants!` to efficiently set them back") => _check_get_constants,
set_constants! = ("sets constants in the expression tree, given: " *
"(1) a flat vector of constants, (2) the expression, and " *
"(3) the reference object produced by `get_constants`") => _check_set_constants!,
string_tree = "returns a string representation of the expression tree" => _check_string_tree,
default_node_type = "returns the default node type for the expression" => _check_default_node,
constructorof = "gets the constructor function for a type" => _check_constructorof,
tree_mapreduce = "applies a function across the tree" => _check_tree_mapreduce
tree_mapreduce = "applies a function across the tree" => _check_tree_mapreduce,
)
)
ei_description = (
@@ -332,10 +336,14 @@ ni_components = (
count_constants = "counts the number of constants" => _check_count_constants,
filter_map = "applies a filter and map function to the tree" => _check_filter_map,
has_constants = "checks if the tree has constants" => _check_has_constants,
get_constants = "gets constants from the tree" => _check_get_constants,
set_constants! = "sets constants in the tree" => _check_set_constants!,
get_constants = ("gets constants from the tree, returning a tuple of: " *
"(1) a flat vector of the constants, and (2) a reference object that " *
"can be used by `set_constants!` to efficiently set them back") => _check_get_constants,
set_constants! = ("sets constants in the tree, given: " *
"(1) a flat vector of constants, (2) the tree, and " *
"(3) the reference object produced by `get_constants`") => _check_set_constants!,
index_constants = "indexes constants in the tree" => _check_index_constants,
has_operators = "checks if the tree has operators" => _check_has_operators
has_operators = "checks if the tree has operators" => _check_has_operators,
)
)

@@ -372,6 +380,8 @@ ni_description = (

#! format: on

# TODO: Create an interface for evaluation
# TODO: Create an interface for evaluation and `extract_gradient`
# extract_gradient = ("given a Zygote-computed gradient with respect to the tree constants, " *
# "extracts a flat vector in the same order as `get_constants`") => _check_extract_gradient,

end
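
Purely as illustration of the commented-out entry above — this helper does not exist in the package, and its signature (taking both the gradient and the expression) is an assumption — such a check could look like:

# Hypothetical sketch of _check_extract_gradient: the extracted gradient
# must be a flat vector with the same axes as the `get_constants` output.
function _check_extract_gradient(gradient, ex::AbstractExpression)
    d = extract_gradient(gradient, ex)
    return d isa AbstractVector && axes(d) == axes(first(get_constants(ex)))
end
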
src/ParametricExpression.jl (23 additions & 6 deletions)
@@ -1,7 +1,7 @@
module ParametricExpressionModule

using DispatchDoctor: @stable, @unstable
using ChainRulesCore: ChainRulesCore, NoTangent, @thunk
using ChainRulesCore: ChainRulesCore as CRC, NoTangent, @thunk

using ..OperatorEnumModule: AbstractOperatorEnum, OperatorEnum
using ..NodeModule: AbstractExpressionNode, Node, tree_mapreduce
@@ -20,6 +20,7 @@ import ..StringsModule: string_tree
import ..EvaluateModule: eval_tree_array
import ..EvaluateDerivativeModule: eval_grad_tree_array
import ..EvaluationHelpersModule: _grad_evaluator
import ..ChainRulesModule: extract_gradient
import ..ExpressionModule:
get_contents,
get_metadata,
@@ -207,7 +208,7 @@ has_constants(ex::ParametricExpression) = _interface_error()
has_operators(ex::ParametricExpression) = has_operators(get_tree(ex))
function get_constants(ex::ParametricExpression{T}) where {T}
constants, constant_refs = get_constants(get_tree(ex))
parameters = ex.metadata.parameters
parameters = get_metadata(ex).parameters
flat_parameters = parameters[:]
num_constants = length(constants)
num_parameters = length(flat_parameters)
@@ -218,9 +219,27 @@ function set_constants!(ex::ParametricExpression{T}, x, refs) where {T}
# First, set the usual constants
set_constants!(get_tree(ex), @view(x[1:(refs.num_constants)]), refs.constant_refs)
# Then, copy in the parameters
ex.metadata.parameters[:] .= @view(x[(refs.num_constants + 1):end])
get_metadata(ex).parameters[:] .= @view(x[(refs.num_constants + 1):end])
return ex
end
function extract_gradient(
gradient::@NamedTuple{
tree::NT,
metadata::@NamedTuple{
_data::@NamedTuple{
operators::Nothing,
variable_names::Nothing,
parameters::PARAM,
parameter_names::Nothing,
}
}
},
ex::ParametricExpression{T,N},
) where {T,N<:ParametricNode{T},NT<:NodeTangent{T,N},PARAM<:AbstractMatrix{T}}
d_constants = extract_gradient(gradient.tree, get_tree(ex))
d_params = gradient.metadata._data.parameters[:]
return vcat(d_constants, d_params) # Same shape as `get_constants`
end

function Base.convert(::Type{Node}, ex::ParametricExpression{T}) where {T}
num_params = UInt16(size(ex.metadata.parameters, 1))
Expand All @@ -238,9 +257,7 @@ function Base.convert(::Type{Node}, ex::ParametricExpression{T}) where {T}
Node{T},
)
end
function ChainRulesCore.rrule(
::typeof(convert), ::Type{Node}, ex::ParametricExpression{T}
) where {T}
function CRC.rrule(::typeof(convert), ::Type{Node}, ex::ParametricExpression{T}) where {T}
tree = get_contents(ex)
primal = convert(Node, ex)
pullback = let tree = tree
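
For `ParametricExpression`s, the flat layout is the tree constants first, then the parameter matrix flattened column-major via `parameters[:]`, mirroring `get_constants`/`set_constants!` above. A sketch of the shared layout, assuming (for illustration only) 2 tree constants and a 2×3 parameter matrix:

# Layout shared by `get_constants`, `set_constants!`, and `extract_gradient`:
#
#   [c1, c2, p[1,1], p[2,1], p[1,2], p[2,2], p[1,3], p[2,3]]
#    constants first, then parameters in column-major order
#
# which is why `extract_gradient` returns `vcat(d_constants, d_params)`.
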
test/test_expressions.jl (13 additions & 0 deletions)
@@ -76,6 +76,19 @@ end
end
end

@testitem "Can also get derivatives of expression itself" begin
using DynamicExpressions
using Zygote: Zygote
using DifferentiationInterface: AutoZygote, gradient

ex = @parse_expression(x1 + 1.5, binary_operators = [+], variable_names = ["x1"])
d_ex = gradient(AutoZygote(), ex) do ex
sum(ex(ones(1, 5)))
end
@test d_ex isa NamedTuple
@test extract_gradient(d_ex, ex) ≈ [5.0]
end

@testitem "Expression simplification" begin
using DynamicExpressions

test/test_multi_expression.jl (3 additions & 0 deletions)
@@ -67,6 +67,9 @@
@test_throws "`set_constants!` function must be implemented for" set_constants!(
multi_ex, nothing, nothing
)
@test_throws "`extract_gradient` function must be implemented for" extract_gradient(
nothing, multi_ex
)
end

tree_factory(f::F, trees) where {F} = f(; trees...)
test/test_parametric_expression.jl (4 additions & 0 deletions)
@@ -316,4 +316,8 @@ end
@test grad.tree.gradient ≈ true_grad[3]
# Gradient w.r.t. the parameters:
@test grad.metadata._data.parameters ≈ true_grad[2]

# Gradient extractor
@test extract_gradient(grad, ex) ≈ vcat(true_grad[3], true_grad[2][:])
@test axes(extract_gradient(grad, ex)) == axes(first(get_constants(ex)))
end