Skip to content

Commit deef7e2

Browse files
authored
Merge branch 'master' into gabor
2 parents bd356a9 + e90bb59 commit deef7e2

22 files changed

+274
-96
lines changed

.travis.yml

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
## Documentation: http://docs.travis-ci.com/user/languages/julia/
22
language: julia
3+
branches:
4+
only:
5+
- master
36
os:
47
- linux
58
- osx
@@ -8,22 +11,21 @@ julia:
811
- 1.3
912
- 1.4
1013
- nightly
11-
# because of Zygote needs to allow failing on nightly
12-
matrix:
13-
allow_failures:
14-
- julia: nightly
1514
notifications:
1615
email: false
1716
after_success:
18-
# push coverage results to Coveralls
19-
- julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Coveralls.submit(process_folder())'
17+
- if [[ $TRAVIS_JULIA_VERSION = 1.4 ]] && [[ $TRAVIS_OS_NAME = linux ]]; then
18+
julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Coveralls.submit(process_folder())';
19+
fi
2020
jobs:
2121
include:
2222
- stage: "Documentation"
23-
julia: 1.0
23+
julia: 1.4
2424
os: linux
2525
script:
2626
- export DOCUMENTER_DEBUG=true
2727
- julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()'
2828
- julia --project=docs/ docs/make.jl
2929
after_success: skip
30+
allow_failures:
31+
- julia: nightly

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ Compat = "2.2, 3"
1818
Distances = "0.8"
1919
Requires = "1.0.1"
2020
SpecialFunctions = "0.8, 0.9, 0.10"
21-
StatsBase = "0.32"
21+
StatsBase = "0.32, 0.33"
2222
StatsFuns = "0.8, 0.9"
2323
ZygoteRules = "0.2"
2424
julia = "1.0"

README.md

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
1-
[![Build Status](https://travis-ci.org/theogf/KernelFunctions.jl.svg?branch=master)](https://travis-ci.org/theogf/KernelFunctions.jl)
2-
[![Coverage Status](https://coveralls.io/repos/github/theogf/KernelFunctions.jl/badge.svg?branch=master)](https://coveralls.io/github/theogf/KernelFunctions.jl?branch=master)
3-
[![Documentation](https://img.shields.io/badge/docs-dev-blue.svg)](https://theogf.github.io/KernelFunctions.jl/dev/)
41
# KernelFunctions.jl
2+
3+
[![Build Status](https://travis-ci.com/JuliaGaussianProcesses/KernelFunctions.jl.svg?branch=master)](https://travis-ci.com/JuliaGaussianProcesses/KernelFunctions.jl)
4+
[![Coverage Status](https://coveralls.io/repos/github/JuliaGaussianProcesses/KernelFunctions.jl/badge.svg?branch=master)](https://coveralls.io/github/JuliaGaussianProcesses/KernelFunctions.jl?branch=master)
5+
[![Documentation (stable)](https://img.shields.io/badge/docs-stable-blue.svg)](https://juliagaussianprocesses.github.io/KernelFunctions.jl/stable)
6+
[![Documentation (latest)](https://img.shields.io/badge/docs-dev-blue.svg)](https://juliagaussianprocesses.github.io/KernelFunctions.jl/dev)
7+
58
## Kernel functions for machine learning
69

710
KernelFunctions.jl provide a flexible and complete framework for kernel functions, pretransforming the input data.
@@ -43,4 +46,4 @@ Directly inspired by the [MLKernels](https://github.com/trthatcher/MLKernels.jl)
4346

4447
## Issues/Contributing
4548

46-
If you notice a problem or would like to contribute by adding more kernel functions or features please [submit an issue](https://github.com/theogf/KernelFunctions.jl/issues).
49+
If you notice a problem or would like to contribute by adding more kernel functions or features please [submit an issue](https://github.com/JuliaGaussianProcesses/KernelFunctions.jl/issues).

docs/make.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ makedocs(
1212
"Transform"=>"transform.md",
1313
"Metrics"=>"metrics.md",
1414
"Theory"=>"theory.md",
15+
"Custom Kernels"=>"create_kernel.md",
1516
"API"=>"api.md"]
1617
)
1718

@@ -20,6 +21,6 @@ makedocs(
2021
# for more information.
2122
deploydocs(
2223
deps = Deps.pip("mkdocs", "python-markdown-math"),
23-
repo = "github.com/theogf/KernelFunctions.jl.git",
24+
repo = "github.com/JuliaGaussianProcesses/KernelFunctions.jl.git",
2425
target = "build"
2526
)

docs/src/create_kernel.md

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
## Creating your own kernel
2+
3+
KernelFunctions.jl contains the most popular kernels already but you might want to make your own!
4+
5+
Here is for example how one can define the Squared Exponential Kernel again :
6+
7+
```julia
8+
struct MyKernel <: Kernel end
9+
10+
KernelFunctions.kappa(::MyKernel, d2::Real) = exp(-d2)
11+
KernelFunctions.metric(::MyKernel) = SqEuclidean()
12+
```
13+
14+
For a "Base" kernel, where the kernel function is simply a function applied on some metric between two vectors of real, you only need to:
15+
- Define your struct inheriting from `Kernel`.
16+
- Define a `kappa` function.
17+
- Define the metric used `SqEuclidean`, `DotProduct` etc. Note that the term "metric" is here overabused.
18+
- Optional : Define any parameter of your kernel as `trainable` by Flux.jl if you want to perform optimization on the parameters. We recommend wrapping all parameters in arrays to allow them to be mutable.
19+
20+
Once these functions are defined, you can use all the wrapping functions of KernelFunctions.jl

src/KernelFunctions.jl

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
"""
2-
KernelFunctions. [Github](https://github.com/theogf/KernelFunctions.jl) [Documentation](https://theogf.github.io/KernelFunctions.jl/dev/)
2+
KernelFunctions. [Github](https://github.com/JuliaGaussianProcesses/KernelFunctions.jl)
3+
[Documentation](https://juliagaussianprocesses.github.io/KernelFunctions.jl/stable/)
34
"""
45
module KernelFunctions
56

67
export kernelmatrix, kernelmatrix!, kerneldiagmatrix, kerneldiagmatrix!, kappa
78
export transform
8-
export params, duplicate, set! # Helpers
9+
export duplicate, set! # Helpers
910

1011
export Kernel
1112
export ConstantKernel, WhiteKernel, EyeKernel, ZeroKernel
@@ -45,7 +46,7 @@ include("distances/dotproduct.jl")
4546
include("distances/delta.jl")
4647
include("transform/transform.jl")
4748

48-
for k in ["exponential","matern","polynomial","constant","rationalquad","exponentiated","cosine","maha","gabor"]
49+
for k in ["exponential","matern","polynomial","constant","rationalquad","exponentiated","cosine","maha","fbm","gabor"]
4950
include(joinpath("kernels",k*".jl"))
5051
end
5152
include("kernels/transformedkernel.jl")

src/generic.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ Base.show(io::IO,κ::Kernel) = print(io,nameof(typeof(κ)))
1616

1717
### Syntactic sugar for creating matrices and using kernel functions
1818
for k in subtypes(BaseKernel)
19+
if k ∈ [FBMKernel] continue end #for kernels without `metric` or `kappa`
1920
@eval begin
2021
@inline (κ::$k)(d::Real) = kappa(κ,d) #TODO Add test
2122
@inline (κ::$k)(x::AbstractVector{<:Real}, y::AbstractVector{<:Real}) = kappa(κ, x, y)

src/kernels/constant.jl

Lines changed: 20 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
ZeroKernel()
2+
ZeroKernel()
33
44
Create a kernel that always returning zero
55
```
@@ -13,28 +13,40 @@ kappa(κ::ZeroKernel, d::T) where {T<:Real} = zero(T)
1313

1414
metric(::ZeroKernel) = Delta()
1515

16+
Base.show(io::IO, ::ZeroKernel) = print(io, "Zero Kernel")
17+
18+
1619
"""
17-
`WhiteKernel()`
20+
WhiteKernel()
1821
1922
```
2023
κ(x,y) = δ(x,y)
2124
```
22-
Kernel function working as an equivalent to add white noise.
25+
Kernel function working as an equivalent to add white noise. Can also be called via `EyeKernel()`
2326
"""
2427
struct WhiteKernel <: BaseKernel end
2528

29+
"""
30+
EyeKernel()
31+
32+
See [WhiteKernel](@ref)
33+
"""
2634
const EyeKernel = WhiteKernel
2735

28-
kappa(κ::WhiteKernel,δₓₓ::Real) = δₓₓ
36+
kappa(κ::WhiteKernel, δₓₓ::Real) = δₓₓ
2937

3038
metric(::WhiteKernel) = Delta()
3139

40+
Base.show(io::IO, ::WhiteKernel) = print(io, "White Kernel")
41+
42+
3243
"""
33-
`ConstantKernel(c=1.0)`
44+
ConstantKernel(; c=1.0)
45+
46+
Kernel function always returning a constant value `c`
3447
```
3548
κ(x,y) = c
3649
```
37-
Kernel function always returning a constant value `c`
3850
"""
3951
struct ConstantKernel{Tc<:Real} <: BaseKernel
4052
c::Vector{Tc}
@@ -46,3 +58,5 @@ end
4658
kappa(κ::ConstantKernel, x::Real) = first(κ.c)*one(x)
4759

4860
metric(::ConstantKernel) = Delta()
61+
62+
Base.show(io::IO, κ::ConstantKernel) = print(io, "Constant Kernel (c = $(first(κ.c)))")

src/kernels/cosine.jl

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
"""
2-
CosineKernel
2+
CosineKernel()
33
44
The cosine kernel is a stationary kernel for a sinusoidal given by
55
```
66
κ(x,y) = cos( π * (x-y) )
77
```
8-
98
"""
109
struct CosineKernel <: BaseKernel end
1110

1211
kappa(κ::CosineKernel, d::Real) = cospi(d)
13-
1412
metric(::CosineKernel) = Euclidean()
13+
14+
Base.show(io::IO, ::CosineKernel) = print(io, "Cosine Kernel")

src/kernels/exponential.jl

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,18 @@
11
"""
2-
`SqExponentialKernel()`
2+
SqExponentialKernel()
33
4-
The squared exponential kernel is an isotropic Mercer kernel given by the formula:
4+
The squared exponential kernel is a Mercer kernel given by the formula:
55
```
66
κ(x,y) = exp(-‖x-y‖²)
77
```
8+
Can also be called via `GaussianKernel` or `SEKernel`.
89
See also [`ExponentialKernel`](@ref) for a
910
related form of the kernel or [`GammaExponentialKernel`](@ref) for a generalization.
1011
"""
1112
struct SqExponentialKernel <: BaseKernel end
1213

1314
kappa(κ::SqExponentialKernel, d²::Real) = exp(-d²)
1415
iskroncompatible(::SqExponentialKernel) = true
15-
1616
metric(::SqExponentialKernel) = SqEuclidean()
1717

1818
Base.show(io::IO,::SqExponentialKernel) = print(io,"Squared Exponential Kernel")
@@ -23,10 +23,11 @@ const GaussianKernel = SqExponentialKernel
2323
const SEKernel = SqExponentialKernel
2424

2525
"""
26-
`ExponentialKernel([ρ=1.0])`
27-
The exponential kernel is an isotropic Mercer kernel given by the formula:
26+
ExponentialKernel()
27+
28+
The exponential kernel is a Mercer kernel given by the formula:
2829
```
29-
κ(x,y) = exp(-ρ‖x-y‖)
30+
κ(x,y) = exp(-‖x-y‖)
3031
```
3132
"""
3233
struct ExponentialKernel <: BaseKernel end
@@ -35,21 +36,24 @@ kappa(κ::ExponentialKernel, d::Real) = exp(-d)
3536
iskroncompatible(::ExponentialKernel) = true
3637
metric(::ExponentialKernel) = Euclidean()
3738

38-
Base.show(io::IO,::ExponentialKernel) = print(io,"Exponential Kernel")
39+
Base.show(io::IO, ::ExponentialKernel) = print(io, "Exponential Kernel")
3940

4041
## Alias ##
4142
const LaplacianKernel = ExponentialKernel
4243

4344
"""
44-
`GammaExponentialKernel([ρ=1.0, [γ=2.0]])`
45+
GammaExponentialKernel(; γ = 2.0)
46+
4547
The γ-exponential kernel is an isotropic Mercer kernel given by the formula:
4648
```
47-
κ(x,y) = exp(-ρ^(2γ)‖x-y‖^(2γ))
49+
κ(x,y) = exp(-‖x-y‖^(2γ))
4850
```
51+
Where `γ > 0`, (the keyword `γ` can be replaced by `gamma`)
52+
For `γ = 1`, see `SqExponentialKernel` and `γ = 0.5`, see `ExponentialKernel`
4953
"""
5054
struct GammaExponentialKernel{Tγ<:Real} <: BaseKernel
5155
γ::Vector{Tγ}
52-
function GammaExponentialKernel(;gamma::T=2.0, γ::T=gamma) where {T<:Real}
56+
function GammaExponentialKernel(; gamma::T=2.0, γ::T=gamma) where {T<:Real}
5357
@check_args(GammaExponentialKernel, γ, γ >= zero(T), "γ > 0")
5458
return new{T}([γ])
5559
end
@@ -58,3 +62,5 @@ end
5862
kappa(κ::GammaExponentialKernel, d²::Real) = exp(-d²^first(κ.γ))
5963
iskroncompatible(::GammaExponentialKernel) = true
6064
metric(::GammaExponentialKernel) = SqEuclidean()
65+
66+
Base.show(io::IO, κ::GammaExponentialKernel) = print(io, "Gamma Exponential Kernel (γ = $(first(κ.γ)))")

src/kernels/exponentiated.jl

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,15 @@
11
"""
2-
`ExponentiatedKernel([ρ=1])`
2+
ExponentiatedKernel()
3+
34
The exponentiated kernel is a Mercer kernel given by:
45
```
5-
κ(x,y) = exp(ρ²xᵀy)
6+
κ(x,y) = exp(xᵀy)
67
```
78
"""
89
struct ExponentiatedKernel <: BaseKernel end
910

1011
kappa(κ::ExponentiatedKernel, xᵀy::Real) = exp(xᵀy)
11-
1212
metric(::ExponentiatedKernel) = DotProduct()
13-
1413
iskroncompatible(::ExponentiatedKernel) = true
14+
15+
Base.show(io::IO, ::ExponentiatedKernel) = print(io, "Exponentiated Kernel")

src/kernels/fbm.jl

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
"""
2+
FBMKernel(; h::Real=0.5)
3+
4+
Fractional Brownian motion kernel with Hurst index `h` from (0,1) given by
5+
```
6+
κ(x,y) = ( |x|²ʰ + |y|²ʰ - |x-y|²ʰ ) / 2
7+
```
8+
9+
For `h=1/2`, this is the Wiener Kernel, for `h>1/2`, the increments are
10+
positively correlated and for `h<1/2` the increments are negatively correlated.
11+
"""
12+
struct FBMKernel{T<:Real} <: BaseKernel
13+
h::T
14+
function FBMKernel(; h::T=0.5) where {T<:Real}
15+
@assert h<=1.0 && h>=0.0 "FBMKernel: Given Hurst index h is invalid."
16+
return new{T}(h)
17+
end
18+
end
19+
20+
Base.show(io::IO, κ::FBMKernel) = print(io, "Fractional Brownian Motion Kernel (h = $(k.h))")
21+
22+
_fbm(modX, modY, modXY, h) = (modX^h + modY^h - modXY^h)/2
23+
24+
function kernelmatrix(κ::FBMKernel, X::AbstractMatrix; obsdim::Int = defaultobs)
25+
@assert obsdim ∈ [1,2] "obsdim should be 1 or 2 (see docs of kernelmatrix))"
26+
modX = sum(abs2, X; dims = 3 - obsdim)
27+
modXX = pairwise(SqEuclidean(), X, dims = obsdim)
28+
return _fbm.(vec(modX), reshape(modX, 1, :), modXX, κ.h)
29+
end
30+
31+
function kernelmatrix!(K::AbstractMatrix, κ::FBMKernel, X::AbstractMatrix; obsdim::Int = defaultobs)
32+
@assert obsdim ∈ [1,2] "obsdim should be 1 or 2 (see docs of kernelmatrix))"
33+
modX = sum(abs2, X; dims = 3 - obsdim)
34+
modXX = pairwise(SqEuclidean(), X, dims = obsdim)
35+
K .= _fbm.(vec(modX), reshape(modX, 1, :), modXX, κ.h)
36+
return K
37+
end
38+
39+
function kernelmatrix(
40+
κ::FBMKernel,
41+
X::AbstractMatrix,
42+
Y::AbstractMatrix;
43+
obsdim::Int = defaultobs,
44+
)
45+
@assert obsdim ∈ [1,2] "obsdim should be 1 or 2 (see docs of kernelmatrix))"
46+
modX = sum(abs2, X, dims=3-obsdim)
47+
modY = sum(abs2, Y, dims=3-obsdim)
48+
modXY = pairwise(SqEuclidean(), X, Y,dims=obsdim)
49+
return _fbm.(vec(modX), reshape(modY, 1, :), modXY, κ.h)
50+
end
51+
52+
function kernelmatrix!(
53+
K::AbstractMatrix,
54+
κ::FBMKernel,
55+
X::AbstractMatrix,
56+
Y::AbstractMatrix;
57+
obsdim::Int = defaultobs,
58+
)
59+
@assert obsdim ∈ [1,2] "obsdim should be 1 or 2 (see docs of kernelmatrix))"
60+
modX = sum(abs2, X, dims=3-obsdim)
61+
modY = sum(abs2, Y, dims=3-obsdim)
62+
modXY = pairwise(SqEuclidean(), X, Y,dims=obsdim)
63+
K .= _fbm.(vec(modX), reshape(modY, 1, :), modXY, κ.h)
64+
return K
65+
end
66+
67+
## Apply kernel on two vectors ##
68+
function _kernel(
69+
κ::FBMKernel,
70+
x::AbstractVector,
71+
y::AbstractVector;
72+
obsdim::Int = defaultobs
73+
)
74+
@assert length(x) == length(y) "x and y don't have the same dimension!"
75+
return κ(x,y)
76+
end
77+
78+
#Syntactic Sugar
79+
function (κ::FBMKernel)(x::AbstractVector{<:Real}, y::AbstractVector{<:Real})
80+
modX = sum(abs2, x)
81+
modY = sum(abs2, y)
82+
modXY = sqeuclidean(x, y)
83+
return (modX^κ.h + modY^κ.h - modXY^κ.h)/2
84+
end
85+
86+
(κ::FBMKernel)(x::Real, y::Real) = (abs2(x)^κ.h + abs2(y)^κ.h - abs2(x-y)^κ.h)/2
87+
88+
function (κ::FBMKernel)(X::AbstractMatrix{<:Real}, Y::AbstractMatrix{<:Real}; obsdim::Integer=defaultobs)
89+
return kernelmatrix(κ, X, Y, obsdim=obsdim)
90+
end
91+
92+
function (κ::FBMKernel)(X::AbstractMatrix{<:Real}; obsdim::Integer=defaultobs)
93+
return kernelmatrix(κ, X, obsdim=obsdim)
94+
end

0 commit comments

Comments
 (0)