#
# In this notebook we show how you can use KernelFunctions.jl to generate
# kernel matrices for classification with a support vector machine, as
# implemented by [LIBSVM](https://github.com/JuliaML/LIBSVM.jl).

using Distributions
using KernelFunctions
# Construct the two interleaved half-moon classes:
X1 = [cos.(angle1) sin.(angle1)] .+ 0.1randn(n1, 2)
X2 = [1 .- cos.(angle2) 1 .- sin.(angle2) .- 0.5] .+ 0.1randn(n2, 2)
X = [X1; X2]
x_train = RowVecs(X)
y_train = vcat(fill(-1, n1), fill(1, n2));

# ## Training
#
# We create a kernel function:
k = SqExponentialKernel() ∘ ScaleTransform(1.5)
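# To see what this composition does, we can compare it against rescaling the inputs
# by hand (here assuming the `exp(-‖x - y‖² / 2)` convention of `SqExponentialKernel`,
# with two toy points `x0` and `y0` chosen only for illustration):
x0, y0 = [0.5, 1.0], [-0.25, 2.0]
k(x0, y0) ≈ exp(-sum(abs2, 1.5 .* x0 .- 1.5 .* y0) / 2)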
# LIBSVM can make use of a pre-computed kernel matrix.
# KernelFunctions.jl can be used to produce that using `kernelmatrix`:
model = svmtrain(kernelmatrix(k, x_train), y_train; kernel=LIBSVM.Kernel.Precomputed)
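# With `Kernel.Precomputed`, `svmtrain` is given the full Gram matrix of the training
# inputs, i.e. a square matrix with one row and one column per training point
# (`K_train` is an ad-hoc name used only for this check):
K_train = kernelmatrix(k, x_train)
size(K_train) == (length(x_train), length(x_train))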
# ## Prediction
#
# For evaluation, we create a 100×100 2D grid based on the extent of the training data:
test_range = range(floor(Int, minimum(X)), ceil(Int, maximum(X)); length=100)
x_test = ColVecs(mapreduce(collect, hcat, Iterators.product(test_range, test_range)));
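# Here `mapreduce(collect, hcat, ...)` stacks the grid points as the columns of a
# 2×10000 matrix, and `ColVecs` tells KernelFunctions.jl to treat each column as one
# input vector, so we expect:
length(x_test) == length(test_range)^2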
# Again, we pass the result of KernelFunctions.jl's `kernelmatrix` to LIBSVM:
y_pred, _ = svmpredict(model, kernelmatrix(k, x_train, x_test));
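# As a rough sanity check, we can also predict on the training inputs themselves and
# compute the training accuracy (a sketch; `mean` comes from the standard library's
# Statistics, and `y_train_pred` is an ad-hoc name):
using Statistics
y_train_pred, _ = svmpredict(model, kernelmatrix(k, x_train))
mean(y_train_pred .== y_train)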
# We can see that the kernelized, non-linear classification successfully separates the two classes in the training data:
plot(; xlim=extrema(test_range), ylim=extrema(test_range), aspect_ratio=1)
contourf!(test_range, test_range, y_pred; levels=1, color=cgrad(:redsblues), alpha=0.7, colorbar_title="prediction")
scatter!(X1[:, 1], X1[:, 2]; color=:red, label="training data: class –1")
scatter!(X2[:, 1], X2[:, 2]; color=:blue, label="training data: class 1")