This example demonstrates how to perform simple linear regression on the cars dataset that ships with R. Our goal is to fit a regression model that predicts the stopping distance of a car from its speed, with both variables standardized beforehand. The quality of the fit is estimated by the mean squared error between the predicted and observed distances. The regression model is optimized by gradient descent with a learning rate of 0.05 for 40 epochs.
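
For this model the loss is L(w, b) = mean((w * x + b - y)^2), so each epoch applies the updates w <- w - eta * dL/dw and b <- b - eta * dL/db. The backward pass below derives these gradients automatically from the graph; for reference, the small helper below is only a sketch (not part of the cgraph API) that writes them out in closed form for a single predictor.

  # Closed-form gradients of the mean squared error for a single predictor
  # (reference only; cg_graph_backward derives the same gradients automatically)
  mse_grads <- function(x, y, w, b)
  {
    # Residuals of the current fit
    residual <- w * x + b - y

    # Derivatives of mean(residual^2) with respect to w and b
    list(w = 2 * mean(x * residual), b = 2 * mean(residual))
  }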

  # Load cgraph package
  library(cgraph)

  # Set seed for reproducible results
  set.seed(6)
  
  # Basic Gaussian initialization
  gaussian_init <- function(n_rows, n_cols, sd = 0.01)
  {
    matrix(rnorm(n_rows * n_cols, mean = 0, sd = sd), n_rows, n_cols)
  }
  
  # Simple linear regression fitted by gradient descent
  lr <- function(x, y, eta = 0.05, n_epochs = 1)
  {
    # Transform to a matrix
    x <- as.matrix(x)
    
    # Transform to a vector
    y <- as.numeric(y)
    
    # Initialize graph
    graph <- cg_graph()
    
    # Create inputs
    input <- cg_constant(t(x), "input")
    target <- cg_constant(y, "target")
    
    # Create parameters
    w <- cg_parameter(gaussian_init(1, ncol(x)), "w")
    b <- cg_parameter(0, "b")
    
    # Calculate the output w %*% input + b
    output <- cg_linear(w, input, b, "output")
  
    # Calculate the mean squared error over all observations
    loss <- cg_mean(cg_colsums((output - target)^2), "loss")
  
    # Keep track of loss
    error <- rep(0, n_epochs)

    # Optimize by gradient descent
    for(i in seq_len(n_epochs))
    {
      # Perform forward pass
      cg_graph_forward(graph, loss)
      
      # Perform backward pass (the gradients are then available via each parameter's grad field)
      cg_graph_backward(graph, loss)
      
      # Update parameters
      w$value <- w$value - eta * w$grad
      b$value <- b$value - eta * b$grad
      
      # Store current loss
      error[i] <- loss$value
    }
    
    # Return a list with class "lr" so that predict() dispatches to predict.lr below
    structure(list(graph = graph, input = input, output = output, error = error), class = "lr")
  }
  
  # Apply a fitted regression model to new data
  predict.lr <- function(model, x)
  {
    # Transform to a matrix
    x <- as.matrix(x)
    
    # Set the input value
    model$input$value <- t(x)
    
    # Perform forward pass
    cg_graph_forward(model$graph, model$output)
    
    # Return the output
    t(model$output$value)
  }
  
  # Standardize both variables (zero mean, unit variance)
  cars <- scale(cars)
  
  # Fit a linear regression model on the cars dataset
  model <- lr(x = cars[, 1], y = cars[, 2], n_epochs = 40)
  
  # Plot the mean squared error during model fitting
  plot(model$error, type = "l", xlab = "Epoch", ylab = "MSE")

  # Plot the cars dataset
  plot(cars, xlab = "Speed", ylab = "Distance")
  
  # Add the fitted regression line (red)
  lines(cars[, 1], predict(model, cars[, 1]), col = 2)
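
Since the mean squared error is convex in w and b, the gradient descent fit should end up close to the ordinary least squares solution. As a quick check, the sketch below uses base R's lm() on the same standardized data and overlays its line; the two lines and their fitted values should lie close to each other.

  # Fit ordinary least squares on the same standardized data
  ols <- lm(dist ~ speed, data = as.data.frame(cars))

  # Overlay the least squares line (blue, dashed)
  abline(ols, col = 4, lty = 2)

  # Largest absolute difference between the two sets of fitted values
  max(abs(predict(model, cars[, 1]) - fitted(ols)))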