R code for the example in the video:
Neural networks with continuous output | ANN vs Regression
rm(list = ls())

# Min-max normalization: rescale a vector to the [0, 1] range
normalize <- function(x) {
  return((x - min(x)) / (max(x) - min(x)))
}

# Example data (Age vs. Price), stored both on the original and the normalized scale
Age   <- c(1, 2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 11)
Price <- c(29, 25, 21, 18, 15, 15, 12, 10, 7, 5, 6, 4, 4)
df <- data.frame(Age, Price, Price_n = normalize(Price), Age_n = normalize(Age))

plot(Age, Price, ylab = "Price", xlab = "Age", col = "blue",
     cex = 1.2, ylim = c(0, 35), xlim = c(0, 12))

set.seed(378)  # To get the same numbers as in my example
library(neuralnet)

# Train 20 networks with one hidden layer of 2 neurons on the normalized data
nn <- neuralnet(Price_n ~ Age_n, data = df, hidden = c(2),
                linear.output = TRUE, threshold = 1e-4,
                rep = 20, act.fct = "logistic")

i <- which.min(nn$result.matrix[1, ])  # Select the repetition with the lowest error

# Predict over a grid on the normalized scale, then transform back to the original
# scale via x_original = x_normalized * (max - min) + min
x   <- seq(0, 1, 0.01)
df2 <- data.frame(Age_n = x)
yy  <- predict(nn, df2, rep = i) * abs(diff(range(Price))) + min(Price)  # Back to original Price scale
tt  <- x * abs(diff(range(Age))) + min(Age)                              # Back to original Age scale
lines(tt, yy)

# SSE of the selected network, computed on the original Price scale
yhat <- predict(nn, data.frame(Age_n = normalize(Age)), rep = i) *
  abs(diff(range(Price))) + min(Price)
(SSE <- sum((Price - yhat)^2))

plot(nn, rep = i)
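
The title contrasts the ANN with regression, while the code above only reports the network's SSE. As a minimal sketch of the regression side (not from the video; fit_lm and SSE_lm are illustrative names, and a straight-line model Price ~ Age on the original scale is assumed), one could fit and score a linear model on the same data for comparison:

# Illustrative comparison only: ordinary linear regression on the original scale
fit_lm <- lm(Price ~ Age, data = df)
(SSE_lm <- sum(residuals(fit_lm)^2))  # SSE of the linear model, comparable to SSE above

# Overlay the regression line on the existing scatter plot
abline(fit_lm, col = "red", lty = 2)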