The Rmd file containing the code can be accessed here.
# formatting of tables
library(knitr)
library(kableExtra)
# read in data
library(readr)
# data management, plotting, etc.
library(tidyverse)
# used to calculate the Deming regression equation
library(mcr)
# used to fit linear mixed model to residual trend
library(lme4)
library(emmeans)
# used to fit generalized additive mixed model to residual trend
library(mgcv)
#' library(lme4)
#' library(mgcv)
#' lmer_model <- lmer(Reaction ~ Days + (Days || Subject), data = sleepstudy)
#' ga_model <- gam(Reaction ~ Days + s(Subject, bs = "re") + s(Days, Subject, bs = "re"),
#'   data = sleepstudy,
#'   method = "REML"
#' )
#'
#' head(
#'   data.frame(
#'     lmer = predict(lmer_model),
#'     gam = predict_gamm(ga_model)
#'   )
#' )
#'
#' head(
#'   cbind(
#'     lmer = predict(lmer_model, re.form = NA),
#'     gam1 = predict_gamm(ga_model, re_form = NA),
#'     gam2 = predict_gamm(ga_model,
#'       exclude = c("s(Subject)", "s(Days,Subject)")
#'     )
#'   )
#' )
#'
#' head(predict_gamm(ga_model, se = TRUE))
#' @export
predict_gamm <- function(
    model,
    newdata,
    re_form = NULL,
    se = FALSE,
    include = NULL,
    exclude = NULL,
    keep_prediction_data = FALSE,
    ...) {
  # Note: because predict.gam does not treat NULL specially, we can't use NULL
  # as a default for the newdata argument (or a differently named argument),
  # and we avoid going into the weeds of rlang to find a workaround.

  # basic checks
  if (!inherits(model, "gam")) stop("Need a gam object.")

  if (!rlang::is_null(include) && !rlang::is_character(include)) {
    stop("include must be NULL or character.")
  }

  if (!rlang::is_null(exclude) && !rlang::is_character(exclude)) {
    stop("exclude must be NULL or character.")
  }

  if (!rlang::is_null(re_form) &&
      !rlang::is_na(re_form) &&
      !rlang::is_character(re_form)) {
    stop("re_form must be NULL, NA, or character.")
  }

  if (any(include %in% exclude)) {
    stop("You can't include and exclude the same thing.")
  }

  if (!rlang::is_logical(se)) {
    stop("se must be TRUE or FALSE.")
  }

  if (!rlang::is_logical(keep_prediction_data)) {
    stop("keep_prediction_data must be TRUE or FALSE.")
  }

  # standard prediction simply calls predict.gam
  if (rlang::is_null(re_form) || rlang::is_character(re_form)) {
    if (rlang::is_null(re_form)) {
      preds <- predict(model,
                       newdata,
                       se = se,
                       terms = include,
                       exclude = exclude,
                       ...)
    } else {
      preds <- predict(model,
                       newdata,
                       se = se,
                       terms = c(include, re_form),
                       exclude = exclude,
                       ...)
    }
  } else if (rlang::is_na(re_form)) {
    # fixed effects only: exclude all random-effect smooths
    re_terms <- sapply(model$smooth, function(x) inherits(x, "random.effect"))
    re_terms <- sapply(model$smooth[re_terms], function(x) x$label)

    preds <- predict(model,
                     newdata,
                     se = se,
                     terms = include,
                     exclude = c(re_terms, exclude),
                     ...)
  }

  if (se) {
    preds <- data.frame(prediction = preds$fit, se = preds$se)
  } else {
    preds <- data.frame(prediction = preds)
  }

  if (keep_prediction_data) {
    if (missing(newdata)) {
      base <- model$model
    } else {
      base <- newdata
    }
    preds <- data.frame(base, preds)
  }

  preds
}
eyefitting_parameter_details <- read_csv("eyefitting-parameter-details.csv")
eyefitting_parameter_details %>% knitr::kable()
| parm_id | y_xbar | slope | sigma | x_min | x_max | x_by |
|---|---|---|---|---|---|---|
| S | 3.88 | 0.66 | 1.3 | 0 | 20 | 0.25 |
| F | 3.90 | 0.66 | 2.8 | 0 | 20 | 0.25 |
| V | 3.89 | 1.98 | 1.5 | 4 | 16 | 0.25 |
| N | 4.11 | -0.70 | 2.5 | 0 | 20 | 0.25 |
linearDataGen <- function(y_xbar,
                          slope,
                          sigma,
                          N = 30,
                          x_min = 0,
                          x_max = 20,
                          x_by = 0.25) {
  # Set up x values
  xVals <- seq(x_min, x_max, length.out = floor(N * 1))
  xVals <- sample(xVals, N, replace = FALSE)
  xVals <- jitter(xVals)
  xVals <- ifelse(xVals < x_min, x_min, xVals)
  xVals <- ifelse(xVals > x_max, x_max, xVals)

  # From point-slope form:
  # y - y_xbar = m(x - xbar)
  # y = m(x - xbar) + y_xbar = mx - m*xbar + y_xbar
  yintercept <- y_xbar - slope * mean(xVals)

  # Generate "good" errors: resample until the check value stays within 2*sigma
  repeat {
    errorVals <- rnorm(N, 0, sigma)
    if (mean(errorVals[floor(N / 3)]) < 2 * sigma &&
        mean(errorVals[floor(N / 3)]) > -2 * sigma) {
      break
    }
  }

  # Simulate point data
  point_data <- tibble(dataset = "point_data",
                       x = xVals,
                       y = yintercept + slope * x + errorVals) %>%
    arrange(x)

  # Obtain least squares regression coefficients
  lm.fit <- lm(y ~ x, data = point_data)
  yintercepthat <- coef(lm.fit)[1] %>% as.numeric()
  slopehat <- coef(lm.fit)[2] %>% as.numeric()

  # Simulate best fit line data
  line_data <- tibble(dataset = "line_data",
                      x = seq(x_min, x_max, x_by),
                      y = yintercepthat + slopehat * x)

  data <- list(point_data = point_data, line_data = line_data)
  return(data)
}
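As a quick sanity check, the generator can be called directly with one row of the parameter table; a minimal illustrative sketch using the "S" parameter values (the object name `s_example` is hypothetical, and output will vary with the random seed):

```r
# Illustrative only: one simulated data set with the "S" parameter values
s_example <- linearDataGen(y_xbar = 3.88, slope = 0.66, sigma = 1.3,
                           N = 30, x_min = 0, x_max = 20, x_by = 0.25)
str(s_example$point_data)  # 30 simulated (x, y) points
str(s_example$line_data)   # OLS best-fit line evaluated on the x grid
```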
set.seed(68505)
eyefitting_sim_data <- eyefitting_parameter_details %>%
mutate(data = purrr::pmap(list(y_xbar = y_xbar,
slope = slope,
sigma = sigma,
x_min = x_min,
x_max = x_max,
x_by = x_by), linearDataGen)) %>%
expand_grid(linear = "true",
draw_start = 5,
free_draw = TRUE) %>%
unnest(data) %>%
unnest(data)
npoints_data <- eyefitting_sim_data %>%
filter(dataset == "point_data")
table(npoints_data$parm_id)
##
## F N S V
## 30 30 30 30
# write.csv(eyefitting_sim_data, "eyefitting-simulated-data-example.csv", row.names = F, na = "")
eyefitting_example_simplot <- eyefitting_sim_data %>%
filter(dataset == "point_data") %>%
# filter(dataset %in% c("F", "N", "S") | (x < 16 & x > 4)) %>%
mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>%
dplyr::rename(`Parameter Choice` = parm_id) %>%
ggplot(aes(x = x, y = y)) +
geom_point(size = 1) +
facet_wrap(~`Parameter Choice`, ncol = 4) +
theme_bw(base_size = 14) +
theme(aspect.ratio = 1,
legend.position = "none",
plot.title = element_text(size = 12, hjust = 0),
axis.text = element_text(size = 12),
axis.title = element_text(size = 12),
legend.title = element_text(size = 12),
legend.text = element_text(size = 12),
# strip.text = element_text(size = 5, margin = margin(0.05,0,0.05,0, "cm")),
# strip.background = element_rect(size = 0.5),
legend.key.size = unit(1, "line")
) +
scale_y_continuous(breaks = seq(-10, 20, 5))
eyefitting_example_simplot
This section only needed to be run once to compute the fitted values based on the principal axis (Deming regression). The ordinary least squares fitted values were computed in the original data simulation step. The resulting data set, eyefitting-model-data.csv, is the one used for the analyses.
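For reference, when the error variances in x and y are assumed equal (error ratio 1), the Deming (principal axis) slope has a closed form; the sketch below is illustrative only and is not part of the pipeline, which uses mcr::mcreg for the actual fits:

```r
# Illustrative only: closed-form principal-axis (Deming, error ratio = 1) fit
pa_fit <- function(x, y) {
  sxx <- var(x); syy <- var(y); sxy <- cov(x, y)
  slope <- (syy - sxx + sqrt((syy - sxx)^2 + 4 * sxy^2)) / (2 * sxy)
  c(intercept = mean(y) - slope * mean(x), slope = slope)
}
```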
# read in feedback and simulated data
eyefitting_feedback_data <- read_csv("https://github.com/earobinson95/Eye-Fitting-Straight-Lines-in-the-Modern-Era/raw/main/analysis/eyefitting-feedback-data.csv")
eyefitting_simulated_data <- read_csv("https://github.com/earobinson95/Eye-Fitting-Straight-Lines-in-the-Modern-Era/raw/main/analysis/eyefitting-simulated-data.csv")
# fit deming regression
demingFit <- function(data) {
  fit <- mcreg(data$x, data$y, method.reg = "Deming")
  demingIntercept <- fit@para[1]
  demingSlope <- fit@para[2]
  return(tibble(demingIntercept = demingIntercept, demingSlope = demingSlope))
}
pca_preds <- eyefitting_simulated_data %>%
filter(dataset == "point_data", participant_id %in% unique(eyefitting_feedback_data$participant_id)) %>%
as_tibble() %>%
tidyr::nest(-c(participant_id, parm_id)) %>%
dplyr::mutate(deming_fit = purrr::map(data, demingFit)) %>%
unnest(deming_fit) %>%
select(participant_id, parm_id, demingIntercept, demingSlope) %>%
expand_grid(x = seq(0,20,0.25)) %>%
mutate(ypca = demingIntercept + demingSlope*x)
eyefitting_model_data <- eyefitting_feedback_data %>%
left_join(pca_preds, by = c("participant_id", "x", "parm_id")) %>%
mutate(residual_ols_drawn = ydrawn - yols,
residual_pca_drawn = ydrawn - ypca) %>%
dplyr::select(nick_name, study_starttime, age, gender, academic_study, recruitment, participant_id, parm_id, plot_id, x, yols, ypca, ydrawn, residual_ols_drawn, residual_pca_drawn)
# save data to csv - already run once and pushed to GitHub.
# write.csv(eyefitting_model_data, file = "../data/eyefitting-model-data.csv", row.names = F, na = "")
De-identified participant data collected in the study and used for the analyses can be viewed and downloaded from GitHub here.
eyefitting_model_data <- read_csv("https://github.com/earobinson95/Eye-Fitting-Straight-Lines-in-the-Modern-Era/raw/main/data/eyefitting-model-data.csv")
factorCols <- c("nick_name", "study_starttime", "age", "gender", "academic_study",
"recruitment", "participant_id", "parm_id", "plot_id")
eyefitting_model_data[,factorCols] <- lapply(eyefitting_model_data[,factorCols], factor)
In total, there are 35 participants and 35 completed 'You Draw It' plots.
| nick_name | study_starttime | age | gender | academic_study | recruitment | participant_id | parm_id | plot_id | x | yols | ypca | ydrawn | residual_ols_drawn | residual_pca_drawn |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 4.00 | -7.762164 | -8.082525 | -6.883773 | 0.8783913 | 1.1987521 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 4.25 | -7.278002 | -7.584984 | -6.432879 | 0.8451235 | 1.1521056 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 4.50 | -6.793840 | -7.087443 | -6.239638 | 0.5542017 | 0.8478051 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 4.75 | -6.309678 | -6.589902 | -5.853157 | 0.4565204 | 0.7367451 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 5.00 | -5.825515 | -6.092361 | -5.209022 | 0.6164931 | 0.8833391 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 5.25 | -5.341353 | -5.594820 | -4.822541 | 0.5188118 | 0.7722791 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 5.50 | -4.857191 | -5.097279 | -4.500474 | 0.3567170 | 0.5968056 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 5.75 | -4.373028 | -4.599738 | -4.049579 | 0.3234492 | 0.5501591 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 6.00 | -3.888866 | -4.102197 | -3.405444 | 0.4834218 | 0.6967531 |
| a43e9f8c045e5762f6396aca7d3484f2 | 1620154415.23688 | 26-30 | Female | Graduate Degree | Direct Email | 0f9fcc784854be73d00970816ffde0e5 | V | 1fa58f3faaa173962e9af0aa98dbeeef | 6.25 | -3.404704 | -3.604656 | -2.503655 | 0.9010485 | 1.1010011 |
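The participant and plot counts stated above can be checked directly from the loaded data; a quick sketch using dplyr:

```r
# Check the number of unique participants and 'You Draw It' plots
eyefitting_model_data %>%
  summarise(n_participants = n_distinct(participant_id),
            n_plots = n_distinct(plot_id))
```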
# Example
eyefitting_model_data %>%
filter(participant_id == "60f510d6896cddc508ea6d3de4b58234", parm_id == "F") %>%
ggplot(aes(x = x)) +
geom_line(aes(y = yols, color = "OLS", linetype = "OLS")) +
geom_line(aes(y = ypca, color = "PA", linetype = "PA")) +
geom_line(aes(y = ydrawn, color = "Drawn", linetype = "Drawn")) +
geom_point(data = eyefitting_simulated_data %>%
filter(dataset == "point_data", participant_id == "60f510d6896cddc508ea6d3de4b58234", parm_id == "F"),
aes(x = x, y = y)) +
facet_wrap(~ parm_id) +
theme_bw() +
theme(aspect.ratio = 1) +
scale_y_continuous("y") +
scale_color_manual("", values = c("black", "steelblue", "orange2")) +
scale_linetype_manual("", values = c("dashed", "solid", "solid"))
eyefitting_model_data %>%
ggplot(aes(x = x)) +
geom_line(aes(y = ydrawn, group = plot_id), alpha = 0.5, color = "steelblue") +
geom_line(alpha = 0.2, aes(y = yols, group = participant_id)) +
facet_wrap(~ parm_id, ncol = 4) +
theme_bw() +
theme(aspect.ratio = 1) +
scale_x_continuous(limits = c(0, 20)) +
ggtitle("OLS")
eyefitting_model_data %>%
ggplot(aes(x = x)) +
geom_line(aes(y = ydrawn, group = plot_id), alpha = 0.5, color = "steelblue") +
geom_line(alpha = 0.2, aes(y = ypca, group = participant_id)) +
facet_wrap(~ parm_id, ncol = 4) +
theme_bw() +
theme(aspect.ratio = 1) +
scale_x_continuous(limits = c(0, 20)) +
ggtitle("PA")
eyefitting_model_data %>%
ggplot(aes(x = x)) +
geom_line(aes(y = residual_ols_drawn, group = plot_id, color = "OLS"), alpha = 0.3) +
geom_line(aes(y = residual_pca_drawn, group = plot_id, color = "PA"), alpha = 0.3) +
geom_hline(yintercept = 0, linetype = "dashed") +
facet_wrap(~parm_id, ncol = 4) +
theme_bw() +
theme(aspect.ratio = 1) +
scale_x_continuous(limits = c(0, 20)) +
scale_color_manual("Estimate", values = c("steelblue", "orange"))
Treatments:
Response: raw residuals
Experimental Design:
ANOVA Table:
| SV | DF |
|---|---|
| participant_id | (50 - 1) = 49 |
| parm_id | (4 - 1) = 3 |
| x | 1 |
| x:parm_id | (4 - 1) = 3 |
| residual | 143 (by subtraction) |
LMM Model:
\[y_{ij,drawn} - \hat y_{ij,ols} = e_{ij,ols} = \left[\gamma_0 + \alpha_i\right] + \left[\gamma_{1} x_{ij} + \gamma_{2i} x_{ij}\right] + p_{j} + \epsilon_{ij}\]
# OLS
eyefitting.ols.lmer <- lmer(residual_ols_drawn ~ -1 + parm_id + x:parm_id + (1|participant_id),
data = eyefitting_model_data)
anova(eyefitting.ols.lmer)
## Analysis of Variance Table
## npar Sum Sq Mean Sq F value
## parm_id 4 123.98 30.99 55.56
## parm_id:x 4 1330.50 332.62 596.26
# PCA
eyefitting.pca.lmer <- lmer(residual_pca_drawn ~ -1 + parm_id + x:parm_id + (1|participant_id),
data = eyefitting_model_data)
anova(eyefitting.pca.lmer)
## Analysis of Variance Table
## npar Sum Sq Mean Sq F value
## parm_id 4 63.266 15.817 22.197
## parm_id:x 4 237.392 59.348 83.290
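The fitted residual-trend slopes for each parameter choice can be pulled from these models, for example with emmeans::emtrends; a brief sketch (not part of the original output):

```r
# Estimated slope of the residual trend (residual vs. x) by parameter choice
emtrends(eyefitting.ols.lmer, ~ parm_id, var = "x")
emtrends(eyefitting.pca.lmer, ~ parm_id, var = "x")
```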
# Obtain Predictions
eyefitting.ols.grid.lmer <- ref_grid(eyefitting.ols.lmer, at = list(x = seq(1,20,0.5)))
eyefitting.ols.preds.lmer <- emmeans(eyefitting.ols.grid.lmer, ~ parm_id:x, cov.reduce = FALSE) %>%
as_tibble()
eyefitting.pca.grid.lmer <- ref_grid(eyefitting.pca.lmer, at = list(x = seq(1,20,0.5)))
eyefitting.pca.preds.lmer <- emmeans(eyefitting.pca.grid.lmer, ~ parm_id:x, cov.reduce = FALSE) %>%
as_tibble()
eyefitting.preds.lmer <- eyefitting.ols.preds.lmer %>%
full_join(eyefitting.pca.preds.lmer, by = c("x", "parm_id"), suffix = c(".ols", ".pca"))
# write.csv(eyefitting.preds.lmer, "youdrawit-eyefitting-lmerpred-data.csv", row.names = F, na = "")
# Plot Predictions
eyefitting.lmer.plot <- eyefitting.preds.lmer %>%
filter((parm_id %in% c("F", "N", "S") | (x <= 16 & x >= 4))) %>%
mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>%
dplyr::rename(`Parameter Choice` = parm_id) %>%
ggplot(aes(x = x)) +
geom_line(data = eyefitting_model_data %>% mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>% dplyr::rename(`Parameter Choice` = parm_id),
aes(x = x, y = residual_ols_drawn, group = plot_id, color = "OLS"), alpha = 0.2) +
geom_line(data = eyefitting_model_data %>% mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>% dplyr::rename(`Parameter Choice` = parm_id),
aes(x = x, y = residual_pca_drawn, group = plot_id, color = "PCA"), alpha = 0.2) +
geom_ribbon(aes(ymin = asymp.LCL.ols, ymax = asymp.UCL.ols, fill = "OLS"), color = NA, alpha = 0.4) +
geom_line(aes(y = emmean.ols, color = "OLS")) +
geom_ribbon(aes(ymin = asymp.LCL.pca, ymax = asymp.UCL.pca, fill = "PA"), color = NA, alpha = 0.4) +
geom_line(aes(y = emmean.pca, color = "PA")) +
geom_hline(yintercept = 0, linetype = "dashed", alpha = 0.5) +
facet_wrap(~`Parameter Choice`, ncol = 4) +
theme_bw(base_size = 14) +
theme(aspect.ratio = 1,
legend.position = "right",
plot.title = element_text(size = 12, hjust = 0),
axis.text = element_text(size = 12),
axis.title = element_text(size = 12),
legend.title = element_text(size = 12),
legend.text = element_text(size = 12),
# strip.text = element_text(size = 5, margin = margin(0.05,0,0.05,0, "cm")),
# strip.background = element_rect(size = 0.5),
legend.key.size = unit(1, "line")
) +
scale_y_continuous("Residual") +
scale_color_manual("Individual participant \nresiduals", values = c("steelblue", "orange"), labels = c("OLS", "PCA")) +
scale_fill_manual("LMER fitted trend", values = c("steelblue", "orange"), labels = c("OLS", "PA"))
eyefitting.lmer.plot
Treatments:
Response: raw residuals
Experimental Design:
GAMM Model: fit = OLS, PCA
\[y_{ijk, drawn} - \hat y_{ijk, fit} = e_{ijk,fit} = \alpha_i + s_{i}(x_{ijk}) + p_{j} + s_{j}(x_{ijk})\]
# OLS
eyefitting.ols.gamm <- bam(residual_ols_drawn ~ parm_id + s(x, by = parm_id) +
s(participant_id, bs = "re") +
s(x,participant_id, bs = "re"),
method = "REML",
data = eyefitting_model_data)
# evaluate gamm
plot(eyefitting.ols.gamm, pages = 1, all.terms=TRUE)
# PCA
eyefitting.pca.gamm <- bam(residual_pca_drawn ~ -1 + parm_id + s(x) +
s(x, by = parm_id) +
s(participant_id, bs = "re") +
s(x,participant_id, bs = "re"),
method = "REML",
data = eyefitting_model_data)
# evaluate gamm
plot(eyefitting.pca.gamm, pages = 1,all.terms=TRUE)
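Beyond the term plots above, mgcv provides standard diagnostics for the fitted GAMMs; a brief optional sketch (not run in the original analysis):

```r
# Optional GAMM diagnostics
summary(eyefitting.ols.gamm)    # effective df and approximate significance of smooths
gam.check(eyefitting.ols.gamm)  # residual plots and basis-dimension (k) checks
```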
# Obtain Predictions
eyefitting.grid.gamm <- expand_grid(parm_id = c("S", "V", "F", "N"),
x = seq(0,20, 0.5),
participant_id = eyefitting_model_data$participant_id[1])
# OLS
eyefitting.ols.preds <- predict_gamm(eyefitting.ols.gamm, newdata = eyefitting.grid.gamm, se = T, re_form = NA)
eyefitting.grid.gamm$ols.pred <- eyefitting.ols.preds$prediction
eyefitting.grid.gamm$ols.lower <- eyefitting.ols.preds$prediction - (1.96 * eyefitting.ols.preds$se)
eyefitting.grid.gamm$ols.upper <- eyefitting.ols.preds$prediction + (1.96 * eyefitting.ols.preds$se)
# PCA
eyefitting.pca.preds <- predict_gamm(eyefitting.pca.gamm, newdata = eyefitting.grid.gamm, se = T, re_form = NA)
eyefitting.grid.gamm$pca.pred <- eyefitting.pca.preds$prediction
eyefitting.grid.gamm$pca.lower <- eyefitting.pca.preds$prediction - (1.96 * eyefitting.pca.preds$se)
eyefitting.grid.gamm$pca.upper <- eyefitting.pca.preds$prediction + (1.96 * eyefitting.pca.preds$se)
# write.csv(eyefitting.grid.gamm, "youdrawit-eyefitting-gammpred-data.csv", row.names = F, na = "")
# Plot Predictions
eyefitting.gamm.plot <- eyefitting.grid.gamm %>%
filter((parm_id %in% c("F", "N", "S") | (x <= 16 & x >= 4))) %>%
mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>%
dplyr::rename(`Parameter Choice` = parm_id) %>%
ggplot(aes(x = x)) +
geom_line(data = eyefitting_model_data %>% mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>% dplyr::rename(`Parameter Choice` = parm_id), aes(x = x, y = residual_ols_drawn, group = plot_id, color = "OLS"), alpha = 0.2) +
geom_line(data = eyefitting_model_data %>% mutate(parm_id = factor(parm_id, levels = c("F", "S", "V", "N"))) %>% dplyr::rename(`Parameter Choice` = parm_id), aes(x = x, y = residual_pca_drawn, group = plot_id, color = "PA"), alpha = 0.2) +
geom_ribbon(aes(ymin = ols.lower, ymax = ols.upper, fill = "OLS"), color = NA, alpha = 0.4) +
geom_line(aes(y = ols.pred, color = "OLS")) +
geom_ribbon(aes(ymin = pca.lower, ymax = pca.upper, fill = "PA"), color = NA, alpha = 0.4) +
geom_line(aes(y = pca.pred, color = "PA")) +
geom_hline(yintercept = 0, linetype = "dashed", alpha = 0.5) +
facet_wrap(~`Parameter Choice`, ncol = 4) +
theme_bw(base_size = 14) +
theme(aspect.ratio = 1,
legend.position = "right",
plot.title = element_text(size = 12, hjust = 0),
axis.text = element_text(size = 12),
axis.title = element_text(size = 12),
legend.title = element_text(size = 12),
legend.text = element_text(size = 12),
# strip.text = element_text(size = 5, margin = margin(0.05,0,0.05,0, "cm")),
# strip.background = element_rect(size = 0.5),
legend.key.size = unit(1, "line")
) +
scale_y_continuous("Residual") +
scale_color_manual("Individual participant \nresiduals", values = c("steelblue", "orange"), labels = c("OLS", "PCA")) +
scale_fill_manual("GAMM fitted trend", values = c("steelblue", "orange"), labels = c("OLS", "PCA"))
eyefitting.gamm.plot