
KGode (version 1.0.4)

rkhs: The 'rkhs' class object

Description

This class provides interpolation methods based on a reproducing kernel Hilbert space (RKHS).


Value

an R6Class object which can be used for interpolation using a reproducing kernel Hilbert space.

Format

R6Class object.

Methods

predict()

This method is used to make predictions at given time points.

skcross()

This method performs cross-validation to estimate the weighting parameter lambda of the L^2 norm.

Author

Mu Niu, mu.niu@glasgow.ac.uk

Public fields

y

matrix (of size n_s*n_o) containing the observations.

t

vector (of length n_o) containing the time points of the observations.

b

vector (of length n_o) containing the coefficients of the kernel or basis functions.

lambda

scalar containing the weighting parameter for the L^2 norm of the reproducing kernel Hilbert space.

ker

kernel class object containing the kernel; see the construction sketch below.
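As a rough illustration (not taken from the package documentation), the public fields correspond one-to-one to the arguments of the constructor rkhs$new(). The sketch below, which mirrors the Examples at the bottom of this page, builds an rkhs object for a single ode state observed at six made-up time points.

library(KGode)

t_no = c(0.1, 1, 2, 3, 4, 8)          # time points of the observations (field 't')
y_no = matrix(exp(-t_no), ncol = 1)   # toy observations of one state (field 'y')
n_o  = length(t_no)

aker = RBF$new(1)                     # rbf kernel object (field 'ker')
bbb  = rkhs$new(t(y_no)[1, ],         # observations of the first (and only) state
                t_no,                 # observation time points
                rep(1, n_o),          # initial kernel coefficients (field 'b')
                1,                    # initial value of lambda
                aker)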

Methods


Method new()

Usage

rkhs$new(y = NULL, t = NULL, b = NULL, lambda = NULL, ker = NULL)


Method greet()

Usage

rkhs$greet()


Method showker()

Usage

rkhs$showker()


Method predict()

Usage

rkhs$predict()
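For illustration, and following the Examples section below, predict() returns an object whose pred element holds the interpolated values at the observation time points:

## assumes 'bbb' is an rkhs object fitted as in the Examples section
pred = bbb$predict()$pred   # interpolated values at the observation time points
plot(t_no, pred)            # plot the interpolation against the time points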


Method predictT()

Usage

rkhs$predictT(testT)
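A hedged sketch of predicting at new time points: testT is assumed to be a numeric vector of time points, e.g. a finer grid over the observation interval. The structure of the returned object is not documented on this page, so it is only stored, not indexed.

## assumes 'bbb' is a fitted rkhs object as in the Examples section
test_t = seq(0.1, 8, by = 0.1)   # hypothetical finer grid of time points
res = bbb$predictT(test_t)       # predictions at the supplied time points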


Method lossRK()

Usage

rkhs$lossRK(par, tl1, y_d, jitter)


Method grlossRK()

Usage

rkhs$grlossRK(par, tl1, y_d, jitter)


Method numgrad()

Usage

rkhs$numgrad(par, tl1, y_d, jitter)


Method skcross()

Usage

rkhs$skcross(init, bounded)
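As in the Examples section, skcross() is called with an initial value of lambda; the bounded argument is not described on this page, so this sketch uses only the first argument:

initlam = 2            # initial value of lambda
bbb$skcross(initlam)   # cross-validation to estimate lambda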


Method mkcross()

Usage

rkhs$mkcross(init)


Method loss11()

Usage

rkhs$loss11(par, tl1, y_d, jitter)


Method grloss11()

Usage

rkhs$grloss11(par, tl1, y_d, jitter)


Method clone()

The objects of this class are cloneable with this method.

Usage

rkhs$clone(deep = FALSE)

Arguments

deep

Whether to make a deep clone.
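A minimal sketch of the standard R6 cloning semantics: a shallow clone shares fields that are themselves R6 objects (such as the kernel in ker), while a deep clone copies them as well.

bbb2 = bbb$clone()             # shallow copy: R6 fields (e.g. the kernel) are shared
bbb3 = bbb$clone(deep = TRUE)  # deep copy: R6 fields are cloned too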

Examples

# \dontshow{
  ## Short example for package checks (runs in under 5 seconds); not shown to users.

  ## Define the ode: a single state with dx/dt = -alpha * x.
  toy_fun = function(t, x, par_ode) {
    alpha = par_ode[1]
    as.matrix(c(-alpha * x[1]))
  }

  ## Gradient of the ode against the ode parameter alpha.
  toy_grlNODE = function(par, grad_ode, y_p, z_p) {
    alpha = par[1]
    dres = c(0)
    dres[1] = sum(2 * (z_p - grad_ode) * y_p * alpha)
    dres
  }

  ## Observation time points and (noise-free) observations of the single state.
  t_no = c(0.1, 1, 2, 3, 4, 8)
  n_o = length(t_no)
  y_no = matrix(c(exp(-t_no)), ncol = 1)

  ######## create and initialise the ode object ########
  init_par = rep(c(0.1))
  init_yode = t(y_no)
  init_t = t_no

  kkk = ode$new(1, fun = toy_fun, grfun = toy_grlNODE, t = init_t,
                ode_par = init_par, y_ode = init_yode)

  ##### use kernel ridge regression to learn the interpolation of the noisy observations
  initlen = 1
  aker = RBF$new(initlen)
  bbb = rkhs$new(t(y_no)[1, ], t_no, rep(1, n_o), 1, aker)

  ## optimise lambda by cross-validation, starting from an initial value of lambda
  initlam = 2
  bbb$skcross(initlam)
# }
if (FALSE) {
require(mvtnorm)
noise = 0.1  ## set the variance of noise
SEED = 19537
set.seed(SEED)
## Define the ode function; we use the Lotka-Volterra model in this example.
## There are two ode states x[1], x[2] and four ode parameters alpha, beta, gamma and delta.
LV_fun = function(t,x,par_ode){
  alpha=par_ode[1]
  beta=par_ode[2]
  gamma=par_ode[3]
  delta=par_ode[4]
  as.matrix( c( alpha*x[1]-beta*x[2]*x[1] , -gamma*x[2]+delta*x[1]*x[2] ) )
}
## Define the gradient of the ode function with respect to the ode parameters:
## df/dalpha, df/dbeta, df/dgamma, df/ddelta, where f is the differential equation.
LV_grlNODE = function(par, grad_ode, y_p, z_p) {
  alpha = par[1]; beta = par[2]; gamma = par[3]; delta = par[4]
  dres = c(0)
  dres[1] = sum( -2*( z_p[1,]-grad_ode[1,])*y_p[1,]*alpha )
  dres[2] = sum(  2*( z_p[1,]-grad_ode[1,])*y_p[2,]*y_p[1,]*beta )
  dres[3] = sum(  2*( z_p[2,]-grad_ode[2,])*gamma*y_p[2,] )
  dres[4] = sum( -2*( z_p[2,]-grad_ode[2,])*y_p[2,]*y_p[1,]*delta )
  dres
}

## create an ode class object
kkk0 = ode$new(2,fun=LV_fun,grfun=LV_grlNODE)
## set the initial values for each state at time zero.
xinit = as.matrix(c(0.5,1))
## set the time interval for the ode numerical solver.
tinterv = c(0,6)
## Solve the ode numerically using predefined ode parameters: alpha=1, beta=1, gamma=4, delta=1.
kkk0$solve_ode(c(1,1,4,1),xinit,tinterv) 

## Add noise to the numerical solution of the ode model and use it as the noisy observation.
n_o = max( dim( kkk0$y_ode) )
t_no = kkk0$t
y_no =  t(kkk0$y_ode) + rmvnorm(n_o,c(0,0),noise*diag(2))

## Create an ode class object using the simulated data from the ode numerical solver.
## Users with experimental data can replace the simulated data with their own observations.
## Set the initial values of the ode parameters.
init_par = rep(c(0.1),4)
init_yode = t(y_no)
init_t = t_no
kkk = ode$new(1,fun=LV_fun,grfun=LV_grlNODE,t=init_t,ode_par= init_par, y_ode=init_yode )

## The following examples take more than 5 seconds of CPU or elapsed time.
####### rkhs interpolation for the 1st state of the ode using the 'rbf' kernel
### set the initial value of the length scale of the rbf kernel
initlen = 1
aker = RBF$new(initlen)
bbb = rkhs$new(t(y_no)[1,],t_no,rep(1,n_o),1,aker)
## optimise lambda by cross-validation
## initial value of lambda
initlam = 2
bbb$skcross( initlam ) 

## Make predictions using the 'predict()' method of the 'rkhs' class and plot them against time.
plot(t_no,bbb$predict()$pred)
}
