Compiled date: 2021-05-19
Last edited: 2021-04-15
License: GPL-3
Run the following code to install the Bioconductor version of the package.
# install.packages("BiocManager")
BiocManager::install("POMA")
library(POMA)
library(MSnbase)
library(ggplot2)
library(patchwork)
Let’s create a cleaned MSnSet object from the example st000336 data to explore the effects of normalization.
# load example data
data("st000336")
# imputation using the default method KNN
example_data <- st000336 %>% PomaImpute()
> method argument is empty! KNN will be used
example_data
> MSnSet (storageMode: lockedEnvironment)
> assayData: 30 features, 57 samples
> element names: exprs
> protocolData: none
> phenoData
> sampleNames: DMD004.1.U02 DMD005.1.U02 ... DMD173.1.U02 (57 total)
> varLabels: group steroids
> varMetadata: labelDescription
> featureData: none
> experimentData: use 'experimentData(object)'
> Annotation:
> - - - Processing information - - -
> Imputed (knn): Wed May 19 18:27:08 2021
> MSnbase version: 2.18.0
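As a quick sanity check (a small sketch added here, not part of the original vignette output), we can verify that the imputed object contains no remaining missing values before normalizing.
# sketch: the expression matrix should contain no NAs after KNN imputation
sum(is.na(MSnbase::exprs(example_data)))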
Here we will evaluate ALL normalization methods that POMA offers on the same MSnSet
object to compare them (Berg et al. 2006).
# apply each normalization method to the same imputed MSnSet
none <- PomaNorm(example_data, method = "none")
auto_scaling <- PomaNorm(example_data, method = "auto_scaling")
level_scaling <- PomaNorm(example_data, method = "level_scaling")
log_scaling <- PomaNorm(example_data, method = "log_scaling")
log_transformation <- PomaNorm(example_data, method = "log_transformation")
vast_scaling <- PomaNorm(example_data, method = "vast_scaling")
log_pareto <- PomaNorm(example_data, method = "log_pareto")
When we check the dimensions of the data after normalization, we can see that ALL methods have the same effect on the data dimensions. PomaNorm only changes the dimensions when the data contain features that are all zeros or features with zero variance. Only in these two cases will PomaNorm remove features, changing the dimensions of the data.
dim(MSnbase::exprs(none))
> [1] 30 57
dim(MSnbase::exprs(auto_scaling))
> [1] 30 57
dim(MSnbase::exprs(level_scaling))
> [1] 30 57
dim(MSnbase::exprs(log_scaling))
> [1] 30 57
dim(MSnbase::exprs(log_transformation))
> [1] 30 57
dim(MSnbase::exprs(vast_scaling))
> [1] 30 57
dim(MSnbase::exprs(log_pareto))
> [1] 30 57
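The dimensions stay at 30 x 57 for every method because this imputed data set has neither all-zero nor zero-variance features. A minimal sketch to check this directly (the variable name X is ours, not from the vignette):
# sketch: count the features PomaNorm would remove
X <- MSnbase::exprs(example_data)
sum(rowSums(X != 0) == 0) # features that are entirely zeros
sum(apply(X, 1, stats::var) == 0) # features with zero variance
# both counts should be 0, so all 30 features are kept by every method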
Here we can evaluate the effect of each normalization method on the samples (Berg et al. 2006).
# sample-wise boxplots for the raw and normalized data
a <- PomaBoxplots(none, group = "samples", jitter = FALSE) +
ggtitle("Not Normalized")
b <- PomaBoxplots(auto_scaling, group = "samples", jitter = FALSE) +
ggtitle("Auto Scaling") +
theme(axis.text.x = element_blank(),
legend.position = "none")
c <- PomaBoxplots(level_scaling, group = "samples", jitter = FALSE) +
ggtitle("Level Scaling") +
theme(axis.text.x = element_blank(),
legend.position = "none")
d <- PomaBoxplots(log_scaling, group = "samples", jitter = FALSE) +
ggtitle("Log Scaling") +
theme(axis.text.x = element_blank(),
legend.position = "none")
e <- PomaBoxplots(log_transformation, group = "samples", jitter = FALSE) +
ggtitle("Log Transformation") +
theme(axis.text.x = element_blank(),
legend.position = "none")
f <- PomaBoxplots(vast_scaling, group = "samples", jitter = FALSE) +
ggtitle("Vast Scaling") +
theme(axis.text.x = element_blank(),
legend.position = "none")
g <- PomaBoxplots(log_pareto, group = "samples", jitter = FALSE) +
ggtitle("Log Pareto") +
theme(axis.text.x = element_blank(),
legend.position = "none")
a
# combine the normalized-data boxplots into a 2 x 3 grid with patchwork
(b + c + d) / (e + f + g)