final.
r2530289
2024-12-10
install.packages(c("shiny", "shinydashboard", "shinythemes"))
## Installing packages into '/cloud/lib/x86_64-pc-linux-gnu-library/4.3'
## (as 'lib' is unspecified)
# Load required libraries
library(shiny)
library(shinydashboard)
##
## Attaching package: 'shinydashboard'
## The following object is masked from 'package:graphics':
##
## box
library(shinythemes)
# UI function
header <- dashboardHeader(title = "Statistical Models")
sidebar <- dashboardSidebar(
sidebarMenu(
menuItem("Introduction", tabName = "Home", icon = icon("home")),
menuItem("MLR", tabName = "mlr", icon = icon("chart-line")),
menuItem("GLM", tabName = "glm", icon = icon("chart-bar")),
menuItem("LMM", tabName = "lmm", icon = icon("project-diagram")),
menuItem("GLMM", tabName = "glmm", icon = icon("cogs")),
menuItem("Bayesian Regression", tabName = "bayesian", icon = icon("brain"))
)
)
body <- dashboardBody(
tabItems(
tabItem(
tabName = "Home",
h1("Statistical Models"),
h2("Submitted by: Renalyn M. Cablo"),
h2("Submitted to: Carlito Daarol"),
h3("Final Project On STT126 Statistical Computing II"),
h4("Date Publish: December 10, 2024")
),
# Page 1: MLR
tabItem(
tabName = "mlr",
fluidRow(
box(
title = "Multiple Linear Regression (MLR)",
width = 12,
# Introduction
h3("Multiple Linear Regression Model"),
p("Multiple Linear Regression (MLR) is a statistical technique used to model the relationship
# Mathematical Model
h4("Mathematical Model"),
withMathJax("$$y_i = \\beta_0 + \\beta_1 x_{i1} + \\beta_2 x_{i2} + \\dots + \\beta_p x_{ip} +
p("Where:"),
tags$ul(
tags$li(" \\( y_i \\): The dependent (response) variable for observation \\( i \\)."),
tags$li(" \\( \\beta_0 \\): The intercept term, representing the expected value of \\( y \\)
tags$li(" \\( \\beta_1, \\beta_2, \\dots, \\beta_p \\): The regression coefficients for the
tags$li(" \\( x_{ij} \\): The value of the \\( j \\)-th independent variable for observation
tags$li(" \\( \\epsilon_i \\): The error term for observation \\( i \\)."),
tags$li(" \\( n \\): The number of observations."),
tags$li(" \\( p \\): The number of predictors (independent variables).")
),
# Matrix Form Representation
h4("Matrix Form Representation"),
p("The MLR model can also be represented in matrix notation as:"),
withMathJax("$$\\mathbf{y} = \\mathbf{X} \\boldsymbol{\\beta} + \\boldsymbol{\\epsilon}$$"),
p("Where:"),
tags$ul(
tags$li(" \\( \\mathbf{y} \\) is the \\( n \\times 1 \\) vector of dependent variable values
withMathJax("$$
\\mathbf{y} =
\\begin{bmatrix}
y_1 \\\\ y_2 \\\\ \\vdots \\\\ y_n
\\end{bmatrix}
$$"),
tags$li(" \\( \\mathbf{X} \\) is the \\( n \\times (p+1) \\) design matrix of independent va
withMathJax("$$
\\mathbf{X} =
\\begin{bmatrix}
1 & x_{11} & x_{12} & \\dots & x_{1p} \\\\
1 & x_{21} & x_{22} & \\dots & x_{2p} \\\\
\\vdots & \\vdots & \\vdots & \\ddots & \\vdots \\\\
1 & x_{n1} & x_{n2} & \\dots & x_{np}
\\end{bmatrix}
$$"),
tags$li(" \\( \\boldsymbol{\\beta} \\) is the \\( (p+1) \\times 1 \\) vector of coefficients
withMathJax("$$
\\boldsymbol{\\beta} =
\\begin{bmatrix}
2
\\beta_0 \\\\ \\beta_1 \\\\ \\vdots \\\\ \\beta_p
\\end{bmatrix}
$$"),
tags$li(" \\( \\boldsymbol{\\epsilon} \\) is the \\( n \\times 1 \\) vector of error terms:"
withMathJax("$$
\\boldsymbol{\\epsilon} =
\\begin{bmatrix}
\\epsilon_1 \\\\ \\epsilon_2 \\\\ \\vdots \\\\ \\epsilon_n
\\end{bmatrix}
$$")
),
# Assumptions of the Model
h4("Assumptions of the Model"),
p("To ensure valid inferences, the MLR model relies on the following assumptions:"),
tags$ol(
tags$li("**Linearity**: The relationship between the independent variables and the dependent
tags$li("**Independence**: Observations are independent of each other."),
tags$li("**Homoscedasticity**: The variance of the error terms is constant across all levels
tags$li("**Normality**: The error terms are normally distributed."),
tags$li("**No Multicollinearity**: Independent variables are not highly correlated with each
),
# Estimation of Coefficients
h4("Estimation of Coefficients"),
p("The coefficients \\( \\boldsymbol{\\beta} \\) are typically estimated using the method of *
withMathJax("$$\\hat{\\boldsymbol{\\beta}} = (\\mathbf{X}ˆT \\mathbf{X})ˆ{-1} \\mathbf{X}ˆT \\
p("Multiple Linear Regression is a foundational tool in statistical modeling, providing insigh
)
)
)
,
# Page 2: GLM
tabItem(
tabName = "glm",
fluidRow(
box(
title = "General Linear Model (GLM)",
width = 12,
# Introduction
h3("General Linear Model"),
p("The General Linear Model (GLM) is a flexible statistical framework that extends linear regr
# Mathematical Model
h4("Mathematical Model"),
p("The GLM consists of three components:"),
tags$ol(
tags$li("**Random Component**: Specifies the probability distribution of the dependent varia
tags$li("**Systematic Component**: Describes the linear predictor as a linear combination of
tags$li("**Link Function**: Connects the linear predictor to the mean of the dependent varia
),
p("The general form of the GLM is given by:"),
withMathJax("$$g(\\mu_i) = \\eta_i = \\beta_0 + \\beta_1 x_{i1} + \\beta_2 x_{i2} + \\dots + \
p("Where:"),
tags$ul(
tags$li("\\( g(\\cdot) \\): The **link function** that relates the expected value \\( \\mu_i
tags$li("\\( \\eta_i \\): The **linear predictor**, a linear combination of independent vari
tags$li("\\( \\beta_0, \\beta_1, \\dots, \\beta_p \\): The regression coefficients to be est
tags$li("\\( x_{ij} \\): The \\( j \\)-th independent variable for observation \\( i \\)."),
tags$li("\\( n \\): The number of observations."),
tags$li("\\( p \\): The number of predictors (independent variables).")
),
# Exponential Family
h4("Exponential Family of Distributions"),
p("The random component specifies the distribution of \\( y_i \\), often from the exponential
withMathJax("$$f(y_i | \\theta_i, \\phi) = \\exp\\left( \\frac{y_i \\theta_i - b(\\theta_i)}{a
p("Where:"),
tags$ul(
tags$li("\\( \\theta_i \\): The canonical parameter."),
tags$li("\\( \\phi \\): The dispersion parameter."),
tags$li("\\( a(\\phi), b(\\theta_i), c(y_i, \\phi) \\): Functions defining the exponential f
),
# Common Link Functions
h4("Common GLM Link Functions and Applications"),
tags$table(
class = "table table-striped",
tags$thead(
tags$tr(
tags$th("Link Function"),
tags$th("Mathematical Form"),
tags$th("Application")
)
),
tags$tbody(
tags$tr(
tags$td("Identity Link"),
tags$td("$$g(\\mu) = \\mu$$"),
tags$td("Linear Regression")
),
tags$tr(
tags$td("Log Link"),
tags$td("$$g(\\mu) = \\log(\\mu)$$"),
tags$td("Poisson Regression (Count Data)")
),
tags$tr(
tags$td("Logit Link"),
tags$td("$$g(\\mu) = \\log\\left(\\frac{\\mu}{1-\\mu}\\right)$$"),
tags$td("Logistic Regression (Binary Outcomes)")
),
tags$tr(
tags$td("Probit Link"),
tags$td("$$g(\\mu) = \\Phi^{-1}(\\mu)$$"),
tags$td("Probit Regression (Binary Outcomes)")
)
)
),
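# Illustrative sketch (comments only, not evaluated by the app): in R, the link
# functions in the table above are chosen through the `family` argument of glm().
# `df` is a hypothetical data frame with a response y and a predictor x.
# glm(y ~ x, data = df, family = gaussian(link = "identity"))  # linear regression
# glm(y ~ x, data = df, family = poisson(link = "log"))        # count data
# glm(y ~ x, data = df, family = binomial(link = "logit"))     # binary, logistic
# glm(y ~ x, data = df, family = binomial(link = "probit"))    # binary, probit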
# Assumptions
h4("Assumptions of the GLM"),
p("To ensure the validity of the model, the following assumptions are typically made:"),
tags$ol(
tags$li("The dependent variable \\( y \\) follows a distribution from the exponential family
tags$li("The relationship between the dependent variable's mean \\( \\mu \\) and the linear
tags$li("Observations are independent of each other.")
),
# Estimation of Parameters
h4("Estimation of Parameters"),
p("The regression coefficients \\( \\boldsymbol{\\beta} \\) are estimated using the method of
withMathJax("$$L(\\boldsymbol{\\beta}) = \\prod_{i=1}ˆn f(y_i | \\theta_i, \\phi)$$"),
p("The log-likelihood is maximized to obtain parameter estimates:"),
withMathJax("$$\\ell(\\boldsymbol{\\beta}) = \\sum_{i=1}ˆn \\log f(y_i | \\theta_i, \\phi)$$")
# Conclusion
h4("Conclusion"),
p("The General Linear Model provides a unified framework for modeling diverse data types. By s
)
)
)
,
# Page 3: LMM
tabItem(
tabName = "lmm",
fluidRow(
box(
title = "Linear Mixed Model (LMM)",
width = 12,
# Introduction
h3("Linear Mixed Model"),
p("A Linear Mixed Model (LMM) is an extension of the linear regression model that includes bot
# Mathematical Model
h4("Mathematical Model"),
p("The general form of the LMM is:"),
withMathJax("$$\\mathbf{y} = \\mathbf{X} \\boldsymbol{\\beta} + \\mathbf{Z} \\mathbf{u} + \\bo
p("Where:"),
tags$ul(
tags$li("\\( \\mathbf{y} \\): The \\( n \\times 1 \\) vector of observed responses."),
tags$li("\\( \\mathbf{X} \\): The \\( n \\times p \\) fixed-effects design matrix, relating
tags$li("\\( \\boldsymbol{\\beta} \\): The \\( p \\times 1 \\) vector of fixed-effects coeff
tags$li("\\( \\mathbf{Z} \\): The \\( n \\times q \\) random-effects design matrix, relating
5
tags$li("\\( \\mathbf{u} \\): The \\( q \\times 1 \\) vector of random effects, assumed to f
withMathJax("$$\\mathbf{u} \\sim \\mathcal{N}(\\mathbf{0}, \\mathbf{G})$$"),
tags$li("\\( \\boldsymbol{\\epsilon} \\): The \\( n \\times 1 \\) vector of residual errors,
withMathJax("$$\\boldsymbol{\\epsilon} \\sim \\mathcal{N}(\\mathbf{0}, \\sigmaˆ2 \\mathbf{I}
tags$li("\\( \\mathbf{G} \\): Covariance matrix of the random effects."),
tags$li("\\( \\sigmaˆ2 \\): The residual variance.")
),
p("The random effects introduce dependencies among observations, accommodating correlations wi
# Components
h4("Components of the Model"),
tags$ol(
tags$li("**Fixed Effects**: Population-level effects, common to all groups or clusters. For
tags$li("**Random Effects**: Group-specific effects that vary across clusters or levels of a
),
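# Illustrative sketch (comments only): fitting an LMM with lme4, assuming that
# package is installed. `df` is a hypothetical data frame with a response y,
# a fixed-effect covariate x, and a grouping factor `group`.
# library(lme4)
# fit_lmm <- lmer(y ~ x + (1 | group), data = df)  # fixed effect of x, random intercept per group
# fixef(fit_lmm)   # fixed-effects estimates (beta)
# ranef(fit_lmm)   # predicted random effects (u) for each group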
# Assumptions
h4("Model Assumptions"),
tags$ol(
tags$li("**Linearity**: The relationship between the fixed effects and the response variable
tags$li("**Normality of Random Effects**: The random effects \\( \\mathbf{u} \\) are normall
tags$li("**Homoscedasticity**: The residual variance \\( \\sigmaˆ2 \\) is constant across al
tags$li("**Independence**: The residual errors \\( \\boldsymbol{\\epsilon} \\) are independe
),
# Parameter Estimation
h4("Estimation of Parameters"),
p("The parameters of an LMM include \\( \\boldsymbol{\\beta} \\), the fixed effects, and the v
p("The log-likelihood function under REML is:"),
withMathJax("$$\\ell_{\\text{REML}} = -\\frac{1}{2} \\left[ \\log|\\mathbf{V}| + \\log|\\mathb
p("Where:"),
tags$ul(
tags$li("\\( \\mathbf{V} = \\mathbf{Z} \\mathbf{G} \\mathbf{Z}ˆT + \\sigmaˆ2 \\mathbf{I} \\)
),
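# Illustrative sketch (comments only): lme4 uses REML by default; the variance
# components (G and sigma^2) can be read from the fitted object. `df` and `group`
# are the hypothetical data frame and grouping factor used above.
# fit_reml <- lmer(y ~ x + (1 | group), data = df, REML = TRUE)
# fit_ml   <- lmer(y ~ x + (1 | group), data = df, REML = FALSE)  # plain ML fit
# VarCorr(fit_reml)   # estimated random-effect variance and residual variance
# logLik(fit_reml)    # REML criterion of the fit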
# Applications
h4("Applications"),
p("LMMs are widely used in fields such as:"),
tags$ul(
tags$li("**Longitudinal Data Analysis**: Modeling repeated measurements on the same subjects
tags$li("**Hierarchical Data**: Analyzing data with nested structures, such as students with
tags$li("**Clustered Data**: Accounting for correlations within clusters, such as geographic
),
# Conclusion
h4("Conclusion"),
p("Linear Mixed Models are a powerful tool for analyzing data with complex dependency structur
)
)
)
,
# Page 4: GLMM
tabItem(
tabName = "glmm",
fluidRow(
box(
title = "General Linear Mixed Model (GLMM)",
width = 12,
# Introduction
h3("General Linear Mixed Model"),
p("The General Linear Mixed Model (GLMM) is an extension of the Linear Mixed Model (LMM) that
# Mathematical Model
h4("Mathematical Model"),
p("The GLMM combines fixed effects, random effects, and a link function to model the mean of t
withMathJax("$$g(\\mu_i) = \\mathbf{x}_iˆ\\top \\boldsymbol{\\beta} + \\mathbf{z}_iˆ\\top \\ma
p("Where:"),
tags$ul(
tags$li("\\( g(\\cdot) \\): The link function that relates the expected value \\( \\mu_i = \
tags$li("\\( \\mu_i \\): The mean of the dependent variable \\( y_i \\), where \\( y_i \\) f
tags$li("\\( \\mathbf{x}_i \\): The vector of fixed-effect covariates for observation \\( i
tags$li("\\( \\boldsymbol{\\beta} \\): The vector of fixed-effect coefficients."),
tags$li("\\( \\mathbf{z}_i \\): The vector of random-effect covariates for observation \\( i
tags$li("\\( \\mathbf{u} \\): The vector of random effects, assumed to follow:"),
withMathJax("$$\\mathbf{u} \\sim \\mathcal{N}(\\mathbf{0}, \\mathbf{G}),$$"),
tags$li("\\( \\mathbf{G} \\): The covariance matrix of the random effects.")
),
p("The response variable \\( y_i \\) is modeled using a probability distribution from the expo
withMathJax("$$f(y_i | \\theta_i, \\phi) = \\exp\\left( \\frac{y_i \\theta_i - b(\\theta_i)}{a
p("Where:"),
tags$ul(
tags$li("\\( \\theta_i \\): The canonical parameter related to \\( \\mu_i \\) through \\( \\
tags$li("\\( \\phi \\): The dispersion parameter."),
tags$li("\\( a(\\phi), b(\\theta_i), c(y_i, \\phi) \\): Functions defining the exponential f
),
# Components of the Model
h4("Components of the Model"),
tags$ol(
tags$li("**Fixed Effects**: Represent the population-level effects, described by \\( \\bolds
tags$li("**Random Effects**: Capture group-level variability or hierarchical structure, repr
tags$li("**Link Function**: Connects the mean response \\( \\mu_i \\) to the linear predicto
),
# Common Distributions
h4("Common Distributions and Link Functions"),
p("GLMMs are highly flexible due to the choice of distribution and link function. Some common
tags$ul(
tags$li("**Binomial Distribution** with a **logit link** for binary data:"),
withMathJax("$$g(\\mu) = \\log\\left(\\frac{\\mu}{1-\\mu}\\right)$$"),
tags$li("**Poisson Distribution** with a **log link** for count data:"),
withMathJax("$$g(\\mu) = \\log(\\mu)$$"),
tags$li("**Normal Distribution** with an **identity link** for continuous data:"),
withMathJax("$$g(\\mu) = \\mu$$")
),
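# Illustrative sketch (comments only): the distribution/link pairs above map to
# the `family` argument of lme4::glmer(), assuming lme4 is installed and `df` is
# a hypothetical data frame with response y, covariate x, and factor `group`.
# library(lme4)
# glmer(y ~ x + (1 | group), data = df, family = binomial(link = "logit"))  # binary y
# glmer(y ~ x + (1 | group), data = df, family = poisson(link = "log"))     # count y
# lmer(y ~ x + (1 | group), data = df)  # normal/identity case reduces to an LMM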
# Estimation of Parameters
h4("Estimation of Parameters"),
p("Parameter estimation in GLMMs involves estimating both fixed effects \\( \\boldsymbol{\\bet
tags$ul(
tags$li("**Maximum Likelihood Estimation (MLE)**: Maximizing the marginal likelihood of the
tags$li("**Restricted Maximum Likelihood (REML)**: Used for variance component estimation.")
),
p("The marginal likelihood is given by:"),
withMathJax("$$L(\\boldsymbol{\\beta}, \\mathbf{G}) = \\int \\prod_{i=1}ˆn f(y_i | \\mu_i, \\p
p("Where \\( \\mathbf{G} \\) is the covariance matrix of the random effects."),
# Model Assumptions
h4("Model Assumptions"),
tags$ol(
tags$li("The response variable follows a distribution from the exponential family."),
tags$li("Random effects \\( \\mathbf{u} \\) follow a multivariate normal distribution."),
tags$li("Observations within a group are independent, given the random effects."),
tags$li("The link function is correctly specified.")
),
# Applications
h4("Applications"),
p("GLMMs are widely used in:"),
tags$ul(
tags$li("**Ecology**: Modeling species count data with spatial or temporal random effects.")
tags$li("**Medicine**: Analyzing patient outcomes with random effects for clinics or doctors
tags$li("**Education**: Modeling test scores with random effects for schools or classrooms."
),
# Conclusion
h4("Conclusion"),
p("The General Linear Mixed Model is a powerful and flexible framework for analyzing hierarchi
)
)
)
,
# Page 5: Bayesian Regression
tabItem(
tabName = "bayesian",
fluidRow(
box(
title = "Bayesian Regression Model",
width = 12,
# Introduction
h3("Bayesian Regression"),
p("Bayesian regression provides a probabilistic approach to linear regression by combining pri
# Mathematical Model
h4("Mathematical Model"),
p("The Bayesian regression model is built upon the following components:"),
# Likelihood Function
h5("Likelihood Function"),
p("The likelihood function represents the probability of the observed data given the model par
withMathJax("$$y_i = \\mathbf{x}_iˆ\\top \\boldsymbol{\\beta} + \\epsilon_i, \\quad i = 1, 2,
p("Where:"),
tags$ul(
tags$li("\\( y_i \\): Response variable for observation \\( i \\),"),
tags$li("\\( \\mathbf{x}_i \\): \\( p \\)-dimensional vector of predictors for observation \
tags$li("\\( \\boldsymbol{\\beta} \\): \\( p \\)-dimensional vector of regression coefficien
tags$li("\\( \\epsilon_i \\sim \\mathcal{N}(0, \\sigmaˆ2) \\): Normally distributed errors w
),
p("The likelihood is given by:"),
withMathJax("$$p(\\mathbf{y} | \\mathbf{X}, \\boldsymbol{\\beta}, \\sigmaˆ2) = \\prod_{i=1}ˆn
p("Where \\( \\mathbf{y} \\) is the vector of observed responses and \\( \\mathbf{X} \\) is th
# Prior Distribution
h5("Prior Distribution"),
p("In Bayesian regression, prior distributions are assigned to the parameters \\( \\boldsymbol
withMathJax("$$\\boldsymbol{\\beta} \\sim \\mathcal{N}(\\boldsymbol{\\mu}_0, \\mathbf{\\Sigma}
withMathJax("$$\\sigmaˆ2 \\sim \\text{Inverse-Gamma}(\\alpha_0, \\beta_0),$$"),
p("Where:"),
tags$ul(
tags$li("\\( \\boldsymbol{\\mu}_0 \\) and \\( \\mathbf{\\Sigma}_0 \\): Mean vector and covar
tags$li("\\( \\alpha_0 \\) and \\( \\beta_0 \\): Shape and scale parameters of the inverse-g
),
# Posterior Distribution
h5("Posterior Distribution"),
p("Using Bayes' theorem, the posterior distribution is proportional to the product of the like
withMathJax("$$p(\\boldsymbol{\\beta}, \\sigmaˆ2 | \\mathbf{y}, \\mathbf{X}) \\propto p(\\math
# Marginal and Predictive Distributions
h5("Marginal and Predictive Distributions"),
p("The marginal posterior of \\( \\boldsymbol{\\beta} \\) can be obtained by integrating out \
withMathJax("$$p(\\boldsymbol{\\beta} | \\mathbf{y}, \\mathbf{X}) = \\int p(\\boldsymbol{\\bet
p("The predictive distribution for a new observation \\( \\mathbf{x}_\\text{new} \\) is:"),
withMathJax("$$p(y_\\text{new} | \\mathbf{x}_\\text{new}, \\mathbf{y}, \\mathbf{X}) = \\int p(
# Model Components
h4("Model Components"),
tags$ol(
tags$li("**Likelihood**: Encodes the relationship between predictors and the response variab
tags$li("**Prior**: Introduces prior beliefs or information about the parameters."),
tags$li("**Posterior**: Combines the likelihood and the prior, representing updated beliefs
tags$li("**Predictive Distribution**: Provides probabilistic predictions for new data points
),
# Advantages
h4("Advantages of Bayesian Regression"),
tags$ul(
tags$li("Handles uncertainty by providing full posterior distributions."),
tags$li("Allows incorporation of prior knowledge or expert opinion."),
tags$li("Naturally extends to hierarchical and complex models."),
tags$li("Facilitates model comparison using metrics like Bayes factors.")
),
# Parameter Estimation
h4("Parameter Estimation"),
p("Bayesian regression often uses computational methods for posterior estimation, including:")
tags$ul(
tags$li("**Markov Chain Monte Carlo (MCMC)**: Sampling methods such as Gibbs sampling or Met
tags$li("**Variational Inference**: Approximates the posterior using optimization.")
),
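# Illustrative sketch (comments only): a minimal random-walk Metropolis sampler
# for a single parameter, here the posterior of a normal mean `mu` with known
# variance 1 and a N(0, 10^2) prior; `y` is a hypothetical data vector.
# log_post <- function(mu, y) sum(dnorm(y, mu, 1, log = TRUE)) + dnorm(mu, 0, 10, log = TRUE)
# draws <- numeric(5000); mu <- 0
# for (s in seq_along(draws)) {
#   prop <- mu + rnorm(1, 0, 0.5)                                        # random-walk proposal
#   if (log(runif(1)) < log_post(prop, y) - log_post(mu, y)) mu <- prop  # accept/reject step
#   draws[s] <- mu
# }
# mean(draws); quantile(draws, c(0.025, 0.975))   # posterior mean and 95% interval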
# Applications
h4("Applications"),
p("Bayesian regression is widely used in:"),
tags$ul(
tags$li("**Economics**: Incorporating prior beliefs about economic relationships."),
tags$li("**Medicine**: Modeling patient outcomes with prior clinical knowledge."),
tags$li("**Engineering**: Reliability analysis with prior data from similar systems."),
tags$li("**Machine Learning**: As a foundation for Bayesian models in supervised learning.")
),
# Conclusion
h4("Conclusion"),
p("Bayesian regression offers a flexible and robust framework for modeling relationships betwe
)
)
)
)
)
ui <- dashboardPage(header, sidebar, body, skin = "blue")
# Server function
server <- function(input, output) {}
# Run the application
shinyApp(ui = ui, server = server)