This section compares the beta loss when directly applying the classical MLP to train $(λ, β)$.
# Load the package, plotting, and RNG utilities used throughout this demo.
using MonotoneSplines
using Plots
using Random
__init_pytorch__() # initialize supports for PyTorch backend
PyObject <module 'boot' from '/home/runner/work/MonotoneSplines.jl/MonotoneSplines.jl/src/boot.py'>
Generate samples
# Reproducible toy data: n covariates x ~ Uniform(-1, 1) and noisy responses
# y = x^3 + ε with ε ~ N(0, σ²). Both draws use fixed seeds so the example
# is repeatable.
seed = 1
n = 100
σ = 0.2
prop_nknots = 0.2
f = x -> x^3
x = 2 * rand(MersenneTwister(seed), n) .- 1
y = f.(x) .+ σ * randn(MersenneTwister(seed + 1), n)
100-element Vector{Float64}:
0.0007819503546631634
-0.17782624307112058
-0.1742614506793042
-1.2979823247474416
-0.13513493385996378
-0.08183503123964558
0.5660362347385295
1.1030450840101351
0.12712269579175728
1.155935257401418
⋮
-0.06035131206452983
0.37117086314660136
-0.12565509945611203
-0.14361458475165484
-0.07610037738674293
-0.4362535693694123
0.08783960166858745
0.029232900958345298
0.02312389902322433
Construct the B-spline basis matrix.
# Build the monotone-spline design at the sample points x. B appears to be the
# B-spline basis matrix; L and J are presumably a penalty-related factor and
# the number of basis functions — TODO confirm against
# MonotoneSplines.build_model's documentation (it returns more values than are
# destructured here; only the first three are kept).
B, L, J = MonotoneSplines.build_model(x, prop_nknots = prop_nknots)
([0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0], [101.9428539030352 0.0 … 0.0 0.0; -139.08167285801295 31.71316023009978 … 0.0 0.0; … ; 0.0 0.0 … 0.1589685279858889 0.0; 0.0 0.0 … -0.1511823989399056 0.004914483903376786], 14, -0.9841814332188785, 1.9839907510161057, [4, 33, 91, 38, 100, 12, 78, 11, 43, 77, 42, 8], [4, 57, 18, 35, 21, 27, 29, 97, 55, 33 … 42, 69, 7, 85, 54, 34, 10, 74, 80, 8], [1.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 1.0], [-28.595566037824863 28.595566037824863 … 0.0 0.0; 0.0 0.0 … -28.144709657570832 28.144709657570832], [0.0, 0.10491136968688579, 0.21899472817772042, 0.27367650493628326, 0.3338442475676866, 0.4326619879646609, 0.5157437085725072, 0.5522624575275193, 0.6453099284911253, 0.7665855741024367, 0.8934080316869423, 1.0])
Range of the smoothness parameter λ
# Ten candidate smoothness parameters, log-spaced over [1e-6, 1], and their
# extreme values for later sampling and grid construction.
λs = 10.0 .^ range(-6, 0, length = 10)
λmin, λmax = extrema(λs)
1.0
Set number of λ for training and validation
# Numbers of λ values used for training (N) and for the validation grid (ngrid).
N = 100
ngrid = 100
# Training λs drawn uniformly from [λmin, λmax]. Seeded for reproducibility —
# this was the only unseeded random draw in the example, while x and y above
# both use MersenneTwister(seed)-style seeding.
arr_λs = rand(MersenneTwister(seed + 2), N) * (λmax - λmin) .+ λmin
# Fitted monotone-spline coefficient vector at each training λ.
βs = [mono_ss(x, y, λ, prop_nknots = prop_nknots).β for λ in arr_λs]
# Stack the coefficient vectors column-wise (one column per λ).
βs_mat = vcat(βs'...)'
# Augmented λ features, one column per λ — semantics of `aug` are not visible
# here; TODO confirm against MonotoneSplines.aug documentation.
λs_mat = vcat(MonotoneSplines.aug.(arr_λs)'...)'
8×100 adjoint(::Matrix{Float64}) with eltype Float64:
0.84318 0.431223 0.727837 … 0.678795 0.618857 0.593
0.944728 0.755499 0.899521 0.878846 0.852178 0.84014
2.32374 1.53914 2.0706 1.9715 1.8568 1.80941
0.918248 0.656676 0.853134 0.82389 0.786675 0.770065
-0.170575 -0.841129 -0.317678 -0.387437 -0.479881 -0.522561
8.4318 4.31223 7.27837 … 6.78795 6.18857 5.93
0.710952 0.185954 0.529747 0.460762 0.382984 0.351649
0.599461 0.0801876 0.385569 0.312763 0.237012 0.208528
Compute reference fits on a uniform λ grid, used for the integrative loss.
# Evaluation grid: ngrid equally spaced λ values over [λmin, λmax], with the
# optimal spline coefficients fitted at each grid point.
grid_λs = range(λmin, λmax, length=ngrid)
grid_βs = [mono_ss(x, y, λ, prop_nknots = prop_nknots).β for λ in grid_λs]
# Stack into matrices matching the training layout (one column per grid λ).
βs_grid_mat = vcat(grid_βs'...)'
λs_grid_mat = vcat(MonotoneSplines.aug.(grid_λs)'...)'
8×100 adjoint(::Matrix{Float64}) with eltype Float64:
1.0e-6 0.010102 0.020203 … 0.979798 0.989899 1.0
0.01 0.216174 0.272357 0.99322 0.996622 1.0
1.0 1.01015 1.02041 2.66392 2.69096 2.71828
0.001 0.100509 0.142137 0.989847 0.994937 1.0
-13.8155 -4.59502 -3.90192 -0.0204089 -0.0101524 0.0
1.0e-5 0.10102 0.20203 … 9.79798 9.89899 10.0
1.0e-12 0.00010205 0.000408161 0.960004 0.9799 1.0
1.0e-18 1.03091e-6 8.24608e-6 0.94061 0.970002 1.0
Train the GpBS model.
# Train the generator G(λ) via the PyTorch backend; returns the generator G,
# the per-iteration training loss, a two-column β-loss history, and bfun.
# NOTE(review): λs_opt_train = λs passes the 10-element log-spaced range,
# while βs_opt_train = βs_mat' holds N = 100 rows fitted at arr_λs —
# confirm this mismatch is intentional; arr_λs may have been intended here.
G, Loss, Loss1, bfun = MonotoneSplines.py_train_G_lambda(y, B, L, nepoch = 0, nepoch0 = 3, K = N,
λl = λmin, λu = λmax, gpu_id = -1,
nhidden = 1000,
λs_opt_train = λs, λs_opt_val = grid_λs,
βs_opt_train = βs_mat', βs_opt_val = βs_grid_mat',
disable_tqdm = true,
niter_per_epoch = 200)
(MonotoneSplines.var"#79#81"{Matrix{Float64}, PyCall.PyObject}([0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0], PyObject <function train_G_lambda.<locals>.<lambda> at 0x7fdf9febdb80>), [0.179214209318161, 1.3556277751922607, 0.5629124641418457, 0.24228602647781372, 0.17488691210746765, 0.38795894384384155, 0.8555715084075928, 0.4368095397949219, 0.33471888303756714, 0.5765613913536072 … 0.05172101408243179, 0.051116134971380234, 0.051794588565826416, 0.04988858103752136, 0.050724875181913376, 0.04921671748161316, 0.05162543058395386, 0.05003600940108299, 0.04908445477485657, 0.04969232901930809], Float32[0.039286934 0.06681631; 0.04572209 0.07563942; … ; 0.025462175 0.00057548465; 0.025501147 0.00056858355], MonotoneSplines.var"#80#82"{PyCall.PyObject}(PyObject <function train_G_lambda.<locals>.<lambda> at 0x7fdf9febdb80>))
Check training loss
# Plot the recorded per-iteration training loss curve.
plot(Loss, label = "training loss")
Check beta loss
# Overlay the two columns of the β-loss history: per the labels, column 1 is
# the training L2 β loss and column 2 the expected (validation) L2 β loss.
plot(Loss1[:, 1], label = "training L2 beta Loss")
plot!(Loss1[:, 2], label = "expected L2 beta loss")