# TRANSFORMATION

## Sums and Mixtures

1. X ~ N(1, 5)
2. Y ~ N(2, 3)
3. Z = X + Y ~ N(3, 8)
1. v ~ 𝛘²₂
2. u ~ 𝛘²₆
3. z = v + u ~ 𝛘²₈
1. w ~ Gamma(2, 4)
2. z ~ Gamma(7, 4)
3. s = w + z ~ Gamma(9, 4)
• If the random variable X has a distribution that is a weighted sum 𝐹(𝑥) = Σ𝜃ᵢ𝐹_{𝑋ᵢ}(𝑥) for some sequence of random variables Xᵢ and constants 𝜃ᵢ > 0 such that Σ𝜃ᵢ = 1, then X is a discrete mixture. The constants 𝜃ᵢ are called the mixing weights.
• If the distribution of X is 𝐹(𝑥) = ∫𝐹_{𝑋|𝑌=𝑦}(𝑥)𝑓_𝑌(𝑦)𝑑𝑦, then X is a continuous mixture.
1. X ~ N(1, 5)
2. Y ~ N(2, 3)
3. Z = 0.5X + 0.5Y
1. w ~ Gamma(2, 4)
2. z ~ Gamma(7, 4)
3. s = 0.7w + 0.3z

# Transformation Methods

## Generating Random Variables from Gamma Distribution.

# Generate Gamma(t, lambda) variates as the sum of t iid Exponential(lambda)
# variables, each obtained from Uniform(0, 1) by the inverse transform method.
set.seed(361)

# Parameters
n <- 10^4       # number of Gamma variates to generate
t <- 4          # shape: number of exponentials summed
lambda <- 9     # rate

# Since we will take a sum, store the uniforms as a matrix:
# one column per Gamma variate, t uniforms per column.
U <- matrix(runif(n * t), nrow = t, ncol = n)
logU <- -log(U) / lambda    # inverse transform: Exponential(lambda)
gamma_y <- colSums(logU)    # column sums of logU -> Gamma(t, lambda)

hist(gamma_y, prob = TRUE, main = "Gamma Distribution (4,9)")
y <- seq(0, 2.5, 0.01)
lines(y, dgamma(y, t, lambda), col = "red", lwd = 2.5)

# Compare theoretical moments with sample estimates.
theoretical_mean <- t / lambda      # mean of Gamma(t, lambda)
theoretical_var <- t / lambda^2     # variance of Gamma(t, lambda)
est_mean <- mean(gamma_y)
est_var <- var(gamma_y)
control <- matrix(
  round(c(theoretical_mean, theoretical_var, est_mean, est_var), 5),
  nrow = 2, byrow = TRUE
)
colnames(control) <- c("Mean", "Variance")
rownames(control) <- c("Theoretical", "Estimated")
control

## Example 2)

library(sads)set.seed(361)#parametersn <- 10^5t <- 18lambda <- 8U <- matrix(runif(n*t),nrow = t, ncol = n)exponential <- -log(U) / lambda # inverse transform methoderlang <- apply(exponential,2,sum) # col sums of matrix logUpareto = (exponential/erlang) + 1 hist(pareto, prob=TRUE, xlim = c(0, 10),     breaks = 100,     main = "Pareto(1, 18)")y = seq(0,10,0.01)lines(y, dpareto(y,  scale = 1, shape=t, log = FALSE),col="red",lwd = 1.5)

## We can use the fact that the chi-square distribution with v degrees of freedom is the sum of v squares of independent standard normal variables;

# Generate chi-square(v) variates as the sum of v squared independent
# standard normals: Z_1^2 + ... + Z_v^2 ~ chi-square(v).
set.seed(361)
n <- 10^5   # number of chi-square variates
v <- 10     # degrees of freedom

Z <- matrix(rnorm(v * n, 0, 1), nrow = v, ncol = n)
SquaredZ <- Z^2
X <- colSums(SquaredZ)   # column sums -> chi-square(v)

hist(X, prob = TRUE, main = "Chi-Square Distribution")
y <- seq(0, 35, 0.01)
lines(y, dchisq(y, v), col = "red", lwd = 2.5)

# Compare theoretical moments (mean = v, variance = 2v) with sample estimates.
theoretical_mean <- v
theoretical_var <- 2 * v
est_mean <- mean(X)
est_var <- var(X)
control <- matrix(
  round(c(theoretical_mean, theoretical_var, est_mean, est_var), 5),
  nrow = 2, byrow = TRUE
)
colnames(control) <- c("Mean", "Variance")
rownames(control) <- c("Theoretical", "Estimated")
control

## Example 4)

set.seed(361)n <- 10^5v <- 40Z1 <- rnorm(n) #standard normal distZ2 <- matrix(rnorm(n*v),v,n)Z2_sqr <- Z2^2V <- colSums(Z2_sqr) #chi-square dist with 40 doft <- Z1 / (sqrt(V / v))hist(t, prob = TRUE, main = "Student-T Distribution")y <- seq(-4,4,0.01)lines(y, dt(y,v),col = "Red", lwd = 2.5)

## The inverse transform for the Exponential(λ) distribution is X = −log(U)/λ, where U ~ Uniform(0, 1)

set.seed(361)n<- 10^5lambda <- 1mu = 5s = 7u1 <- runif(n)x <- -log(u1) / lambda # inverse transform method for first exponnetial variableu2 <- runif(n)y <- -log(u2) / lambda # inverse transform method for second exponnetial variablelogistic <- mu-(s*log(x/y))hist(logistic, prob = TRUE, main = "Logistic(5, 7)")a <- seq(-100,100,0.01)lines(a, (exp(-(a-mu)/s) / (s*(1 + exp(-(a-mu)/s))^2)),col = "Red", lwd = 2.5)

## Remember that if U ~ 𝛘²ₘ and V ~ 𝛘²ₙ are independent, then (U/m)/(V/n) ~ F(m, n)

N <- 10^5m <- 2n <- 8z1 <- matrix(rnorm(m*N),m,N) #standard normal matrixz2 <- matrix(rnorm(n*N),n,N) #standard normal matrixsquared_z1 <- z1^2 #chi-square 1 dof matrixsquared_z2 <- z2^2 #chi-square 1 dof matrixU <- colSums(squared_z1) #chi-square 2 dofV <- colSums(squared_z2) #chi-square 8 dofRv.F <- (U/m)/(V/n) # F(2,8)hist(Rv.F, prob = T, main = "F-distribution(2,8)")a <- seq(0,50,0.01)lines(a,df(a,m,n), col = "red", lwd = 2.5)

--

--