Random variable \(X\)
\(X \sim U[0, 1]\)
Error or noise \(\epsilon\)
\(\epsilon \sim N(0, 1/3)\)
Random variable \(Y\)
\(Y = \sin(4X) + \epsilon\)
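Because X and eps are random draws, the exact scatter changes from run to run; setting a seed first (the value 1 below is arbitrary) makes the simulation reproducible.
set.seed(1)  # arbitrary seed, for reproducibility of the runif() and rnorm() draws below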
# X ~ U[0, 1]: draw 100 samples
X <- runif(100, min = 0, max = 1)
# eps ~ N(0, 1/3): rnorm() takes the standard deviation, so sd = sqrt(1/3)
eps <- rnorm(100, mean = 0, sd = sqrt(1/3))
# Y = sin(4X) + eps
f <- function(X, eps) {
  sin(4 * X) + eps
}
Y <- f(X, eps)
Quadratic Smoother (Epanechnikov kernel)
\(D(t) = \begin{cases} \frac{3}{4}(1 - t^{2}) & |t| \leq 1 \\ 0 & \text{otherwise} \end{cases}\)
quadratic_smoother <- function(t) {
  # D(t) = 3/4 * (1 - t^2) for |t| <= 1, and 0 otherwise
  if (abs(t) <= 1) {
    return(3/4 * (1 - t^2))
  } else {
    return(0)
  }
}
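As a quick sanity check (the input values below are illustrative, not from the original), the kernel peaks at 3/4 at t = 0 and vanishes outside [-1, 1]:
quadratic_smoother(0)    # 0.75
quadratic_smoother(0.5)  # 0.5625
quadratic_smoother(1.5)  # 0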
Kernel Smoother
\(K_{\lambda}(x_{0}, x) = D\left(\dfrac{|x - x_{0}|}{\lambda}\right)\)
kernel_smoother <- function(x_0, x, lambda) {
  # Scale the distance |x - x_0| by the window width lambda, then apply D
  return(quadratic_smoother(abs(x - x_0) / lambda))
}
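For illustration (the query point, observation, and bandwidth below are made-up values), points close to \(x_{0}\) receive large weights, and points farther than \(\lambda\) away receive weight zero:
kernel_smoother(0.5, 0.52, 0.2)  # |0.52 - 0.5| / 0.2 = 0.1, weight 3/4 * (1 - 0.1^2) = 0.7425
kernel_smoother(0.5, 0.75, 0.2)  # |0.75 - 0.5| / 0.2 = 1.25 > 1, weight 0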
\(\hat{f}(x_{0}) = \dfrac{\sum_{i=1}^{N}K_{\lambda}(x_{0}, x_{i})y_{i}}{\sum_{i=1}^{N}K_{\lambda}(x_{0}, x_{i})}\)
\(\hat{f}\) is the function approximating \(f\) using the one-dimensional kernel smoother (the Nadaraya–Watson kernel-weighted average).
f_kern_avg <- function(x_0, X, Y, lambda) {
  Y_kern_avg <- numeric(length(x_0))
  # Kernel-weighted average of Y at each query point x_0[j]
  for (j in 1:length(x_0)) {
    sum_weighted_Yi <- 0
    sum_Yi_weights <- 0
    for (i in 1:length(X)) {
      # Weight of observation i at the query point x_0[j]
      weight_Yi <- kernel_smoother(x_0[j], X[i], lambda)
      sum_weighted_Yi <- sum_weighted_Yi + weight_Yi * Y[i]
      sum_Yi_weights <- sum_Yi_weights + weight_Yi
    }
    Y_kern_avg[j] <- sum_weighted_Yi / sum_Yi_weights
  }
  return(Y_kern_avg)
}
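The double loop can also be written with R's vectorised arithmetic. The sketch below (an assumed alternative, not part of the original) computes the same estimator by forming all quadratic-kernel weights for a query point at once:
f_kern_avg_vec <- function(x_0, X, Y, lambda) {
  sapply(x_0, function(x0) {
    t <- abs(X - x0) / lambda                 # scaled distances to every observation
    w <- ifelse(t <= 1, 3/4 * (1 - t^2), 0)   # vectorised quadratic kernel weights
    sum(w * Y) / sum(w)                       # weighted average of the responses
  })
}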
Metric window size \(\lambda = 0.2\)
# Query points for the fitted curve and the window width
x <- seq(0, 1, by = 0.01)
lambda <- 0.2
# Scatter of the simulated data with the kernel-smoothed fit overlaid
plot(X, Y, col = "blue", pch = 19)
lines(x, f_kern_avg(x, X, Y, lambda), col = "magenta", lwd = 2)
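To judge the fit visually, the true mean function \(\sin(4x)\) from the setup above can be overlaid on the same plot (the line style below is arbitrary):
lines(x, sin(4 * x), col = "black", lty = 2, lwd = 2)  # true regression function, for comparison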