Libraries

  library(cgraph)

Fit function

#' Fit a linear regression model by gradient descent using cgraph.
#'
#' @param x Predictor matrix (or a vector, coerced to a one-column matrix);
#'   one row per observation.
#' @param y Numeric response vector, one value per observation.
#' @param eta Learning rate of the gradient descent updates.
#' @param n_epochs Number of gradient descent steps over the full data.
#' @param sigma Standard deviation of the random initial weights.
#' @return An object of class "lr" holding the computational graph, the loss
#'   node, and the per-epoch training error (MSE).
lr <- function(x, y, eta = 0.05, n_epochs = 1, sigma = 0.1)
{
  x <- as.matrix(x)
  
  y <- as.numeric(y)

  # Build the computational graph: output = w %*% input + b.
  graph <- cg_graph()
  
  input <- cg_input("input")
  
  target <- cg_input("target")
  
  parms <- list(
    w = cg_parameter(matrix(rnorm(ncol(x), sd = sigma), 1, ncol(x)), "w"),
    b = cg_parameter(0, "b")
  )
  
  output <- cg_linear(parms$w, input, parms$b, "output")
  
  # Mean squared error; observations are the columns of the (transposed) input.
  loss <- cg_mean(cg_colsums((output - target)^2), "loss")
  
  # Preallocate the per-epoch error trace.
  error <- rep(0, n_epochs)
  
  # seq_len() rather than 1:n_epochs so n_epochs == 0 performs no updates
  # instead of iterating over c(1, 0).
  for(i in seq_len(n_epochs))
  {
    values <- cg_graph_run(graph, loss, list(
      input = t(x), target = y
    ))
    
    grads <- cg_graph_gradients(graph, loss, values)
    
    # Plain gradient descent step.
    # NOTE(review): this relies on cgraph parameter nodes having reference
    # semantics (environments), so the assignment updates the graph in
    # place — confirm against the installed cgraph version.
    for(parm in parms)
    {
      parm$value <- parm$value - eta * grads[[parm$name]]
    }
    
    error[i] <- values$loss
  }
  
  structure(list(graph = graph, loss = loss, error = error), class = "lr")
}

Predict function

#' Predict from a fitted "lr" model.
#'
#' @param model A model returned by lr().
#' @param x New predictor values (vector or matrix, one row per observation).
#' @return A column of predicted responses, one per row of x.
predict.lr <- function(model, x)
{
  newdata <- as.matrix(x)

  # Run the stored graph up to the loss node; the target is irrelevant for
  # prediction, so a dummy value of 0 is supplied. The fitted values are
  # read from the intermediate "output" node.
  results <- cg_graph_run(model$graph, model$loss, list(
    input = t(newdata), target = 0
  ))

  t(results$output)
}

Example

Let us test the linear regression model on the cars dataset (available in R).

  head(cars)
##   speed dist
## 1     4    2
## 2     4   10
## 3     7    4
## 4     7   22
## 5     8   16
## 6     9   10

The data is rescaled by performing z-normalization:

  train <- scale(cars)

We fit a linear regression model that predicts the distance driven by a car based on its speed.

  x <- lr(x = train[, "dist"], y = train[, "speed"], n_epochs = 50)

We can visualize the Mean Squared Error (MSE) at each epoch during the model fitting.

  plot(x$error, type = "l", xlab = "Epoch", ylab = "MSE")

If we plot the linear function (red line) estimated by the regression model on the data (black circles), we see that the model fits reasonably well.

# Scatter plot of the (scaled) data, then overlay the fitted line in red.
plot(train)

lines(train[, "speed"], predict(x, train[, "speed"]), col = 2)