Skip to content

Add general TensorProduct kernel #81

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Apr 16, 2020
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/KernelFunctions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ export MahalanobisKernel, GaborKernel, PiecewisePolynomialKernel
export PeriodicKernel
export KernelSum, KernelProduct
export TransformedKernel, ScaledKernel
export TensorProduct

export Transform, SelectTransform, ChainTransform, ScaleTransform, LowRankTransform, IdentityTransform, FunctionTransform

Expand Down Expand Up @@ -56,6 +57,7 @@ include("kernels/scaledkernel.jl")
include("matrix/kernelmatrix.jl")
include("kernels/kernelsum.jl")
include("kernels/kernelproduct.jl")
include("kernels/tensorproduct.jl")
include("approximations/nystrom.jl")
include("generic.jl")

Expand Down
175 changes: 175 additions & 0 deletions src/kernels/tensorproduct.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
"""
    TensorProduct(kernels...)

Create a tensor product of kernels: each wrapped kernel is evaluated on the
corresponding group of features and the results are multiplied together.
"""
struct TensorProduct{K} <: Kernel
    kernels::K
end

# Varargs convenience constructor: collect the kernels into a tuple.
TensorProduct(kernel::Kernel, kernels::Kernel...) = TensorProduct((kernel, kernels...))

# Number of component kernels.
Base.length(kernel::TensorProduct) = length(kernel.kernels)

# Calling a `TensorProduct` evaluates it via `kappa`.
(kernel::TensorProduct)(x, y) = kappa(kernel, x, y)

# Pair the i-th kernel with the i-th entries of `x` and `y` and multiply the
# component kernel values.
function kappa(kernel::TensorProduct, x, y)
    factors = (kappa(k, xi, yi) for (k, xi, yi) in zip(kernel.kernels, x, y))
    return prod(factors)
end

# TODO: General implementation of `kernelmatrix` and `kerneldiagmatrix`
# Default implementation assumes 1D observations

"""
    kernelmatrix!(K::AbstractMatrix, kernel::TensorProduct, X::AbstractMatrix; obsdim::Int = defaultobs)

In-place kernel matrix of `X` for a tensor-product kernel: `K` is filled with
the first component kernel's matrix, then multiplied elementwise by the
remaining component kernels' matrices (one feature slice per kernel).
"""
function kernelmatrix!(
    K::AbstractMatrix,
    kernel::TensorProduct,
    X::AbstractMatrix;
    obsdim::Int = defaultobs
)
    # BUGFIX: the original `obsdim ∈ (1, 2) || "..."` evaluated to a bare
    # string and never threw, so invalid `obsdim` slipped through silently.
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")

    featuredim = feature_dim(obsdim)
    if !check_dims(K, X, X, featuredim, obsdim)
        throw(DimensionMismatch("Dimensions of the target array K $(size(K)) are not consistent with X $(size(X))"))
    end

    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    # First factor written directly into K, remaining factors multiplied in.
    kernelmatrix!(K, kernel.kernels[1], selectdim(X, featuredim, 1))
    for (k, Xi) in Iterators.drop(zip(kernel.kernels, eachslice(X; dims = featuredim)), 1)
        K .*= kernelmatrix(k, Xi)
    end

    return K
end

# In-place cross kernel matrix of `X` and `Y` for a tensor-product kernel:
# the first component kernel writes into `K`, every further component kernel
# multiplies its matrix in elementwise.
function kernelmatrix!(
    K::AbstractMatrix,
    kernel::TensorProduct,
    X::AbstractMatrix,
    Y::AbstractMatrix;
    obsdim::Int = defaultobs
)
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")

    featuredim = feature_dim(obsdim)
    if !check_dims(K, X, Y, featuredim, obsdim)
        throw(DimensionMismatch("Dimensions $(size(K)) of the target array K are not consistent with X ($(size(X))) and Y ($(size(Y)))"))
    end

    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    kernels = kernel.kernels
    Xslices = eachslice(X; dims = featuredim)
    Yslices = eachslice(Y; dims = featuredim)

    kernelmatrix!(K, first(kernels), selectdim(X, featuredim, 1), selectdim(Y, featuredim, 1))
    for (k, Xi, Yi) in Iterators.drop(zip(kernels, Xslices, Yslices), 1)
        K .*= kernelmatrix(k, Xi, Yi)
    end

    return K
end

# mapreduce with multiple iterators requires Julia 1.2 or later.

"""
    kernelmatrix(kernel::TensorProduct, X::AbstractMatrix; obsdim::Int = defaultobs)

Kernel matrix of `X` for a tensor-product kernel, computed as the elementwise
product of the component kernels' matrices over the corresponding feature
slices.
"""
function kernelmatrix(
    kernel::TensorProduct,
    X::AbstractMatrix;
    obsdim::Int = defaultobs
)
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")

    featuredim = feature_dim(obsdim)
    if !check_dims(X, X, featuredim, obsdim)
        # BUGFIX: the original message interpolated `K`, which does not exist
        # in this non-mutating method and would raise `UndefVarError` instead
        # of the intended `DimensionMismatch`.
        throw(DimensionMismatch("Dimensions of X $(size(X)) are not consistent"))
    end

    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    # mapreduce with multiple iterators requires Julia 1.2 or later, hence zip.
    return mapreduce((x, y) -> x .* y,
                     zip(kernel.kernels, eachslice(X; dims = featuredim))) do (k, Xi)
        kernelmatrix(k, Xi)
    end
end

"""
    kernelmatrix(kernel::TensorProduct, X::AbstractMatrix, Y::AbstractMatrix; obsdim::Int = defaultobs)

Cross kernel matrix of `X` and `Y` for a tensor-product kernel, computed as
the elementwise product of the component kernels' cross matrices.
"""
function kernelmatrix(
    kernel::TensorProduct,
    X::AbstractMatrix,
    Y::AbstractMatrix;
    obsdim::Int = defaultobs
)
    # BUGFIX: was `@assert obsdim ∈ (1, 2) || error(...)` — `@assert` may be
    # disabled at higher optimization levels and is redundant around the
    # `cond || error(...)` idiom; use plain validation like the sibling methods.
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")

    featuredim = feature_dim(obsdim)
    if !check_dims(X, Y, featuredim, obsdim)
        # BUGFIX: the original message interpolated `K`, undefined in this
        # non-mutating method (would raise `UndefVarError`); report X and Y.
        throw(DimensionMismatch("Dimensions of X ($(size(X))) and Y ($(size(Y))) are not consistent"))
    end

    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    # mapreduce with multiple iterators requires Julia 1.2 or later, hence zip.
    return mapreduce((x, y) -> x .* y,
                     zip(kernel.kernels,
                         eachslice(X; dims = featuredim),
                         eachslice(Y; dims = featuredim))) do (k, Xi, Yi)
        kernelmatrix(k, Xi, Yi)
    end
end

# In-place diagonal of the kernel matrix for a tensor-product kernel: the
# first component kernel writes into `K`, the remaining component kernels
# multiply their diagonals in elementwise.
function kerneldiagmatrix!(
    K::AbstractVector,
    kernel::TensorProduct,
    X::AbstractMatrix;
    obsdim::Int = defaultobs
)
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")
    length(K) == size(X, obsdim) ||
        throw(DimensionMismatch("Dimensions of the target array K $(size(K)) are not consistent with X $(size(X))"))

    featuredim = feature_dim(obsdim)
    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    slices = eachslice(X; dims = featuredim)
    kerneldiagmatrix!(K, first(kernel.kernels), selectdim(X, featuredim, 1))
    for (k, Xi) in Iterators.drop(zip(kernel.kernels, slices), 1)
        K .*= kerneldiagmatrix(k, Xi)
    end

    return K
end

# Diagonal of the kernel matrix for a tensor-product kernel: elementwise
# product of the component kernels' diagonals over the feature slices.
function kerneldiagmatrix(
    kernel::TensorProduct,
    X::AbstractMatrix;
    obsdim::Int = defaultobs
)
    obsdim ∈ (1, 2) || error("obsdim should be 1 or 2 (see docs of kernelmatrix))")

    featuredim = feature_dim(obsdim)
    size(X, featuredim) == length(kernel) ||
        error("number of kernels and groups of features are not consistent")

    return mapreduce(
        ((k, Xi),) -> kerneldiagmatrix(k, Xi),
        (a, b) -> a .* b,
        zip(kernel.kernels, eachslice(X; dims = featuredim)),
    )
end

Base.show(io::IO, kernel::TensorProduct) = printshifted(io, kernel, 0)

# Print a header line, then each component kernel on its own line, indented
# `shift + 1` tabs and prefixed with "- "; components recurse at `shift + 2`.
function printshifted(io::IO, kernel::TensorProduct, shift::Int)
    print(io, "Tensor product of ", length(kernel), " kernels:")
    indent = "\t" ^ (shift + 1)
    for k in kernel.kernels
        print(io, "\n", indent, "- ")
        printshifted(io, k, shift + 2)
    end
end
56 changes: 50 additions & 6 deletions src/matrix/kernelmatrix.jl
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
"""
kernelmatrix!(K::Matrix, κ::Kernel, X::Matrix; obsdim::Integer = 2)
kernelmatrix!(K::Matrix, κ::Kernel, X::Matrix, Y::Matrix; obsdim::Integer = 2)
kernelmatrix!(K::AbstractMatrix, κ::Kernel, X; obsdim::Integer = 2)
kernelmatrix!(K::AbstractMatrix, κ::Kernel, X, Y; obsdim::Integer = 2)

In-place version of [`kernelmatrix`](@ref) where pre-allocated matrix `K` will be overwritten with the kernel matrix.
"""
kernelmatrix!

# A vector of scalar observations is treated as a 1×n feature matrix with
# observations along the second dimension.
function kernelmatrix!(
    K::AbstractMatrix, kernel::Kernel, X::AbstractVector;
    obsdim::Int = defaultobs,
)
    return kernelmatrix!(K, kernel, reshape(X, 1, :); obsdim = 2)
end

function kernelmatrix!(
K::AbstractMatrix,
Expand All @@ -23,6 +31,16 @@ end
kernelmatrix!(K::AbstractMatrix, κ::TransformedKernel, X::AbstractMatrix; obsdim::Int = defaultobs) =
kernelmatrix!(K, kernel(κ), apply(κ.transform, X, obsdim = obsdim), obsdim = obsdim)

# Vectors of scalar observations are treated as 1×n feature matrices with
# observations along the second dimension.
function kernelmatrix!(
    K::AbstractMatrix, kernel::Kernel, X::AbstractVector, Y::AbstractVector;
    obsdim::Int = defaultobs,
)
    return kernelmatrix!(K, kernel, reshape(X, 1, :), reshape(Y, 1, :); obsdim = 2)
end

function kernelmatrix!(
K::AbstractMatrix,
κ::Kernel,
Expand Down Expand Up @@ -60,8 +78,8 @@ _kernel(κ::TransformedKernel, x::AbstractVector, y::AbstractVector; obsdim::Int
_kernel(kernel(κ), apply(κ.transform, x), apply(κ.transform, y), obsdim = obsdim)

"""
kernelmatrix(κ::Kernel, X::Matrix; obsdim::Int = 2)
kernelmatrix(κ::Kernel, X::Matrix, Y::Matrix; obsdim::Int = 2)
kernelmatrix(κ::Kernel, X; obsdim::Int = 2)
kernelmatrix(κ::Kernel, X, Y; obsdim::Int = 2)

Calculate the kernel matrix of `X` (and `Y`) with respect to kernel `κ`.
`obsdim = 1` means the matrix `X` (and `Y`) has size #samples x #dimension
Expand All @@ -88,6 +106,15 @@ end
kernelmatrix(κ::TransformedKernel, X::AbstractMatrix; obsdim::Int = defaultobs) =
kernelmatrix(kernel(κ), apply(κ.transform, X, obsdim = obsdim), obsdim = obsdim)

# Vectors of reals are interpreted as 1×n matrices of scalar observations
# (observations along the second dimension).
function kernelmatrix(
    kernel::Kernel, X::AbstractVector{<:Real}, Y::AbstractVector{<:Real};
    obsdim::Int = defaultobs,
)
    return kernelmatrix(kernel, reshape(X, 1, :), reshape(Y, 1, :); obsdim = 2)
end

function kernelmatrix(
κ::Kernel,
X::AbstractMatrix,
Expand All @@ -107,12 +134,20 @@ kernelmatrix(κ::TransformedKernel, X::AbstractMatrix, Y::AbstractMatrix; obsdim
kernelmatrix(kernel(κ), apply(κ.transform, X, obsdim = obsdim), apply(κ.transform, Y, obsdim = obsdim), obsdim = obsdim)

"""
kerneldiagmatrix(κ::Kernel, X::Matrix; obsdim::Int = 2)
kerneldiagmatrix(κ::Kernel, X; obsdim::Int = 2)

Calculate the diagonal matrix of `X` with respect to kernel `κ`
`obsdim = 1` means the matrix `X` has size #samples x #dimension
`obsdim = 2` means the matrix `X` has size #dimension x #samples
"""
# A vector of scalar observations is treated as a 1×n feature matrix with
# observations along the second dimension.
function kerneldiagmatrix(
    kernel::Kernel, X::AbstractVector;
    obsdim::Int = defaultobs,
)
    return kerneldiagmatrix(kernel, reshape(X, 1, :); obsdim = 2)
end

function kerneldiagmatrix(
κ::Kernel,
X::AbstractMatrix;
Expand All @@ -127,10 +162,19 @@ function kerneldiagmatrix(
end

"""
kerneldiagmatrix!(K::AbstractVector,κ::Kernel, X::Matrix; obsdim::Int = 2)
kerneldiagmatrix!(K::AbstractVector, κ::Kernel, X; obsdim::Int = 2)

In place version of [`kerneldiagmatrix`](@ref)
"""
# A vector of scalar observations is treated as a 1×n feature matrix with
# observations along the second dimension.
function kerneldiagmatrix!(
    K::AbstractVector, kernel::Kernel, X::AbstractVector;
    obsdim::Int = defaultobs,
)
    return kerneldiagmatrix!(K, kernel, reshape(X, 1, :); obsdim = 2)
end

function kerneldiagmatrix!(
K::AbstractVector,
κ::Kernel,
Expand Down
76 changes: 76 additions & 0 deletions test/kernels/tensorproduct.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
@testset "tensorproduct" begin
    # Seeded RNG for reproducibility; draw order matters for the stream.
    rng = MersenneTwister(123456)
    u1 = rand(rng, 10)
    u2 = rand(rng, 10)
    v1 = rand(rng, 5)
    v2 = rand(rng, 5)

    # Component kernels and the two supported construction styles
    # (varargs and collection).
    k1 = SqExponentialKernel()
    k2 = ExponentialKernel()
    kernel1 = TensorProduct(k1, k2)
    kernel2 = TensorProduct([k1, k2])

    @test kernel1.kernels === (k1, k2) === TensorProduct((k1, k2)).kernels

    @testset "kappa" begin
        # Inputs as tuples and as vectors of feature groups.
        for (x, y) in (((v1, u1), (v2, u2)), ([v1, u1], [v2, u2]))
            val = k1(x[1], y[1]) * k2(x[2], y[2])

            @test kernel1(x, y) == kernel2(x, y) == val
            @test KernelFunctions.kappa(kernel1, x, y) ==
                KernelFunctions.kappa(kernel2, x, y) == val
        end
    end

    @testset "kernelmatrix" begin
        X = rand(2, 10)
        Y = rand(2, 10)
        # Reference values: product of the per-feature kernel matrices.
        trueX = kernelmatrix(k1, X[1, :]) .* kernelmatrix(k2, X[2, :])
        trueXY = kernelmatrix(k1, X[1, :], Y[1, :]) .* kernelmatrix(k2, X[2, :], Y[2, :])
        scratch = Matrix{Float64}(undef, 10, 10)

        for kernel in (kernel1, kernel2)
            @test kernelmatrix(kernel, X) == trueX
            @test kernelmatrix(kernel, X'; obsdim = 1) == trueX

            @test kernelmatrix(kernel, X, Y) == trueXY
            @test kernelmatrix(kernel, X', Y'; obsdim = 1) == trueXY

            fill!(scratch, 0)
            kernelmatrix!(scratch, kernel, X)
            @test scratch == trueX

            fill!(scratch, 0)
            kernelmatrix!(scratch, kernel, X'; obsdim = 1)
            @test scratch == trueX

            fill!(scratch, 0)
            kernelmatrix!(scratch, kernel, X, Y)
            @test scratch == trueXY

            fill!(scratch, 0)
            kernelmatrix!(scratch, kernel, X', Y'; obsdim = 1)
            @test scratch == trueXY
        end
    end

    @testset "kerneldiagmatrix" begin
        X = rand(2, 10)
        # Diagonal entries compare each observation with itself, so the
        # expected value is a vector of ones for these kernels.
        expected = ones(10)
        scratch = Vector{Float64}(undef, 10)

        for kernel in (kernel1, kernel2)
            @test kerneldiagmatrix(kernel, X) == expected
            @test kerneldiagmatrix(kernel, X'; obsdim = 1) == expected

            fill!(scratch, 0)
            kerneldiagmatrix!(scratch, kernel, X)
            @test scratch == expected

            fill!(scratch, 0)
            kerneldiagmatrix!(scratch, kernel, X'; obsdim = 1)
            @test scratch == expected
        end
    end
end
1 change: 1 addition & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ using KernelFunctions: metric
include(joinpath("kernels", "piecewisepolynomial.jl"))
include(joinpath("kernels", "rationalquad.jl"))
include(joinpath("kernels", "scaledkernel.jl"))
include(joinpath("kernels", "tensorproduct.jl"))
include(joinpath("kernels", "transformedkernel.jl"))

# Legacy tests that don't correspond to anything meaningful in src. Unclear how
Expand Down