if (FALSE) {
#-------------------------------------------------------------------------------
# 1. LAYER EXAMPLE:
# Example R function to be used when the layer is encoding:
# Version for when the final input (a single value per PE) is directly sent to
# the layer (by set_input or some connection set).
# Outputs difference from current bias values, stores current input as new bias:
LAYERenc1 <- function(INPUT, BIAS, ...)
{
  # Layer encode function, direct-input variant: each PE has received a single
  # value (via set_input or a connection set) in INPUT.
  # Returns the difference of that input from the stored biases as the layer
  # output, and replaces the biases with the current input.
  new_output <- INPUT - BIAS          # difference from previous bias values
  list(OUTPUT = new_output,           # becomes the layer's output
       BIAS   = INPUT)                # current input is stored as the new bias
}
# Example R function to be used when the layer is recalling (mapping):
# Version for when the final input (a single value per PE) is directly sent to
# the layer (by set_input or some connection set).
# Outputs difference from current bias values:
LAYERrec1 <- function(INPUT, BIAS, ...)
{
  # Layer recall/map function, direct-input variant: each PE has received a
  # single value (via set_input or a connection set) in INPUT.
  # The layer's output is the difference of that input from the stored biases.
  INPUT - BIAS
}
# Example R function to be used when the layer is encoding (same as above):
# Version for cases where a connection set is designed to send multiple
# values (one for each incoming connection) to each PE in the layer so that
# the PE can process them as needed. - typically via its 'input_function'.
# (also works when set_input is used)
# INPUT_Q is a matrix where each column contains the values that have been sent
# to the corresponding node (PE).
# Outputs difference from current bias values, stores current input as new bias:
LAYERenc2 <- function(INPUT_Q, BIAS, ...)
{
  # Layer encode function, queued-input variant: INPUT_Q is a matrix where
  # each column holds the values sent to the corresponding PE (e.g. one per
  # incoming connection). The column sums form the final per-PE input.
  # Output is that input minus the old biases; biases are updated to the input.
  final_input <- colSums(INPUT_Q)     # summation input function
  list(OUTPUT = final_input - BIAS,   # difference from previous bias values
       BIAS   = final_input)          # store final input as the new bias
}
# Example R function to be used when the layer is recalling/mapping (same as above):
# version for cases where a connection set is designed to send multiple
# values (one for each incoming connection) to each PE in the layer so that
# the PE can process them as needed - typically via its 'input_function'.
# (also works when set_input is used)
# INPUT_Q is a matrix where each column contains the values that have been sent
# to the corresponding node (PE).
# Outputs difference from current bias values:
LAYERrec2 <- function(INPUT_Q, BIAS, ...)
{
  # Layer recall/map function, queued-input variant: INPUT_Q is a matrix where
  # each column holds the values sent to the corresponding PE.
  # Sums each column to produce the final input, then returns that input
  # minus the stored biases as the layer output.
  colSums(INPUT_Q) - BIAS
}
# create and setup a "NN".
# NOTE(review): "NN" is an external reference class not defined in this file
# (presumably from the nnlib2Rcpp package) -- confirm against the package docs.
n<-new("NN")
# Add a single 4-node layer whose encode/recall behavior is delegated to the
# R functions defined above; they are passed by name, as strings.
n$add_layer(list(name="R-layer", size=4,
encode_FUN="LAYERenc1", recall_FUN="LAYERrec1"))
# test the layer:
n$set_input_at(1,c(1,0,5,5))
n$encode_at(1)                # LAYERenc1 stores the input as the new biases
print(n$get_biases_at(1))     # so this should print 1 0 5 5
n$set_input_at(1,c(20,20,20,20))
n$recall_at(1)                # LAYERrec1 outputs input minus biases
print(n$get_output_at(1))     # expected: 19 20 15 15
n$set_input_at(1,c(10,0,10,0))
n$recall_at(1)
print(n$get_output_at(1))     # expected: 9 0 5 -5
#-------------------------------------------------------------------------------
# 2. CONNECTION SET EXAMPLE:
# This simple connection set will encode data by adding to each connection
# weight the output of the source node.
CSenc <- function(WEIGHTS, SOURCE_OUTPUT, ...)
{
  # Connection-set encode function: each weight is increased by the output of
  # its source node (R recycling applies SOURCE_OUTPUT across weight columns).
  # Returns the updated weights for the connection set to store.
  list(WEIGHTS = WEIGHTS + SOURCE_OUTPUT)
}
# When recalling, this simple connection set multiplies source data by weights.
# this version sends multiple values (the products) to each destination node.
# Typical nodes (such as "generic" ones) add these values to process them.
CSrec1 <- function(WEIGHTS, SOURCE_OUTPUT, ...)
{
  # Connection-set recall function, multi-value variant: returns the
  # elementwise products of weights and source outputs, so each destination
  # node receives one value per incoming connection (to summate itself).
  WEIGHTS * SOURCE_OUTPUT
}
# When recalling, this simple connection set multiplies source data by weights.
# this version sends a single value (the sum of the products) to each
# destination node.
CSrec2 <- function(WEIGHTS, SOURCE_OUTPUT, ...)
{
  # Connection-set recall function, single-value variant: matrix product of
  # the source outputs with the weight matrix, so each destination node
  # receives one value (the dot product of sources with its weight column).
  SOURCE_OUTPUT %*% WEIGHTS
}
# create and setup a "NN".
# NOTE(review): "NN" is an external reference class not defined in this file
# (presumably from the nnlib2Rcpp package) -- confirm against the package docs.
n<-new("NN")
n$add_layer("generic",4)      # component 1: 4-node input layer
n$add_connection_set(list(name="R-connections",encode_FUN="CSenc",recall_FUN="CSrec2"))
n$add_layer("generic",2)      # component 3: 2-node output layer
# fully connect the layers; NOTE(review): the (0,0) arguments presumably set
# the initial-weight range to [0,0] -- verify against package docs.
n$create_connections_in_sets(0,0)
# test the NN:
n$set_input_at(1,c(0,1,5,10))
n$encode_all_fwd()            # CSenc adds each source output to its weights
n$set_input_at(1,c(1,1,1,1))
n$encode_all_fwd()
# see if weights were modified:
# (component 2 is the connection set added above)
print(n$get_weights_at(2))
n$set_input_at(1,c(20,20,20,20))
n$recall_all_fwd()            # CSrec2 sends dot products to destination nodes
print(n$get_output_at(3))     # component 3 is the second "generic" layer
#-------------------------------------------------------------------------------
# 3. A COMPLETE EXAMPLE (simple single layer perceptron-like NN):
# Function for connections, when recalling/mapping:
# Use any one of the two functions below.
# Each column of the returned matrix contains the data that will be sent to the
# corresponding destination node.
# version 1: sends multiple values (product) for destination nodes to summate.
CSmap1 <- function(WEIGHTS, SOURCE_OUTPUT, ...)
{
  # elementwise products; destination nodes summate the incoming values.
  WEIGHTS * SOURCE_OUTPUT
}
# version 2: sends corresponding value (dot product) to destination node.
CSmap2 <- function(WEIGHTS, SOURCE_OUTPUT, ...)
{
  # one dot product per destination node (source outputs x weight column).
  SOURCE_OUTPUT %*% WEIGHTS
}
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Function for connections, when encoding data:
learning_rate <- 0.3
CSenc <- function(WEIGHTS, SOURCE_OUTPUT, DESTINATION_MISC, DESTINATION_OUTPUT, ...)
{
  # Connection-set encode function: error-correcting weight update.
  # The desired output has been placed in the destination nodes' misc
  # registers; the adjustment for each weight is
  #   learning_rate * (desired - actual) * source_output.
  error_signal <- learning_rate * (DESTINATION_MISC - DESTINATION_OUTPUT)
  adjustment   <- outer(SOURCE_OUTPUT, error_signal)  # outer() defaults to "*"
  list(WEIGHTS = WEIGHTS + adjustment)                # return adjusted weights
}
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Function for layer, when recalling/mapping:
# (note: no encode function is used for the layer in this example)
LAmap <- function(INPUT_Q, ...)
{
  # Layer recall/map function: each column of INPUT_Q holds the values sent
  # to the corresponding PE. Summation input function followed by a step
  # threshold (1 when the net input is positive, 0 otherwise).
  net_input <- colSums(INPUT_Q)
  1 * (net_input > 0)
}
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# prepare some data based on iris data set:
data_in <- as.matrix(iris[1:4])       # 4 numeric measurements per case
iris_cases <- nrow(data_in)
# make a "one-hot" encoding matrix for iris species:
# row i gets a 1 in the column given by the factor code of iris$Species.
desired_data_out <- matrix(data = 0, nrow = iris_cases, ncol = 3)
desired_data_out[cbind(seq_len(iris_cases), unclass(iris[, 5]))] <- 1
# create the NN and define its components:
# (first generic layer simply accepts input and transfers it to the connections)
# NOTE(review): "NN" is an external reference class not defined in this file
# (presumably from the nnlib2Rcpp package) -- confirm against the package docs.
p <- new("NN")
p$add_layer("generic", 4)                           # component 1: input layer
p$add_connection_set(list(name = "R-connections",   # component 2: weights
                          encode_FUN = "CSenc",
                          recall_FUN = "CSmap2"))
p$add_layer(list(name = "R-layer",                  # component 3: output layer
                 size = 3,
                 encode_FUN = "",                   # layer itself does not encode
                 recall_FUN = "LAmap"))
p$create_connections_in_sets(0, 0)
# encode data and desired output (for 50 training epochs):
# (loop variable renamed from `c` to avoid shadowing base::c)
for (epoch in seq_len(50))
  for (case in seq_len(iris_cases))
  {
    p$input_at(1, data_in[case, ])
    p$set_misc_values_at(3, desired_data_out[case, ]) # desired output in misc registers
    p$recall_all_fwd()                                # forward pass (sets outputs)
    p$encode_at(2)                                    # adjust connection weights
  }
# Recall the data and show NN's output:
for (case in seq_len(iris_cases))
{
  p$input_at(1, data_in[case, ])
  p$recall_all_fwd()
  cat("iris case ", case, ", desired = ", desired_data_out[case, ],
      " returned = ", p$get_output_from(3), "\n")
}
}
# Run the code above in your browser using DataLab