Commit 44d86220 authored by Chris Jewell
Browse files

Implemented tensorA-based storage and manipulation of posterior.

parent 8ec8c210
......@@ -15,7 +15,9 @@ Description: Implements a non-parametric source attribution model
License: GPL-3
Depends:
R (>= 3.2.1),
dplyr
dplyr,
tensorA,
assertthat
Imports:
gtools,
hashmap,
......
......@@ -323,6 +323,7 @@ HaldDP <- R6::R6Class(
namesLocations = private$namesLocations,
namesTypes = private$namesTypes
)
#private$posterior$sample(private$DPModel_impl)
},
set_update_schema = function(update_schema)
{
......@@ -1052,7 +1053,7 @@ HaldDP <- R6::R6Class(
alpha=alpha,
CI_type=CI_type))
names(res)[1] = 'ci'
reorder.tensor(res, i = c('source','time','location','ci'))
res
},
calc_CI_q = function(object, alpha, CI_type)
{
......@@ -1066,7 +1067,6 @@ HaldDP <- R6::R6Class(
alpha=alpha,
CI_type=CI_type))
names(res)[1] = 'ci'
reorder.tensor(res, c('type','source','time','ci'))
},
calc_CI_lambda_i = function(object, alpha, CI_type)
{
......@@ -1075,7 +1075,6 @@ HaldDP <- R6::R6Class(
alpha=alpha,
CI_type=CI_type))
names(res)[1] = 'ci'
reorder.tensor(res, c('type','time','location','ci'))
},
calc_CI_lambda_j = function(object, alpha, CI_type)
{
......@@ -1084,7 +1083,6 @@ HaldDP <- R6::R6Class(
alpha=0.05,
CI_type=CI_type))
names(res)[1] = 'ci'
reorder.tensor(res, i=c('source','time','location','ci'))
},
check_extract_summary_params = function(params,
times,
......@@ -1272,7 +1270,8 @@ HaldDP <- R6::R6Class(
)
)
},
calc_acceptance = function() {
calc_acceptance = function()
{
## mcmc is finished, save and print acceptance rate summary for r and alpha
private$acceptance <-
......@@ -1317,11 +1316,11 @@ HaldDP <- R6::R6Class(
})
if ('alpha' %in% private$update_schema) {
cat("\nalpha acceptance: \n")
message(print(private$acceptance$alpha))
print(private$acceptance$alpha)
}
if ('r' %in% private$update_schema) {
cat("\nr acceptance: \n")
message(print(private$acceptance$r))
print(private$acceptance$r)
}
}
),
......@@ -1466,7 +1465,6 @@ HaldDP <- R6::R6Class(
{
return(private$acceptance)
},
extract = function(params = c("alpha", "q", "s", "r", "lambda_i", "lambda_j", "lambda_j_prop"),
times = NULL,
locations = NULL,
......@@ -1483,51 +1481,82 @@ HaldDP <- R6::R6Class(
assert_that(is.atomic(drop), is.logical(drop))
params <- params_checked$params
times <- params_checked$times
locations <- params_checked$locations
sources <- params_checked$sources
types <- params_checked$types
iters <- params_checked$iters
flatten <- params_checked$flatten
params = params_checked$params
times = params_checked$times
locations = params_checked$locations
sources = params_checked$sources
types = params_checked$types
iters = params_checked$iters
flatten = params_checked$flatten
if(isTRUE(flatten)) drop = FALSE
res = lapply(setNames(params, params), function(x) {
switch(
x,
# TODO: This is a PITA hack for buggy tensorA::[.tensor
# method: replaced with sliceTensor(x,...) for now.
# See utils.R for details. CPJ 2017-04-06
"alpha" = sliceTensor(
private$posterior$alpha,
sources,
times,
locations,
iters,
drop = drop
),
"q" = sliceTensor(private$posterior$q, types, iters, drop = drop),
"s" = sliceTensor(private$posterior$s, types, iters, drop = drop),
"r" = sliceTensor(private$posterior$r, types, sources, times, iters, drop = drop),
"lambda_i" = sliceTensor(
private$posterior$lambda_i,
types,
times,
locations,
iters,
drop = drop
),
"lambda_j" = sliceTensor(
private$posterior$lambda_j,
sources,
times,
locations,
iters,
drop = drop
),
"lambda_j_prop" = sliceTensor(
private$posterior$lambda_j_prop,
sources,
times,
locations,
iters,
drop = drop
),
stop("Unrecognised model component")
)
})
if (flatten == FALSE) {
return(lapply(setNames(params, params), function(x) {
switch(
x,
"alpha" = private$posterior$alpha[sources, times, locations, iters, drop = drop],
"q" = private$posterior$q[types, iters, drop = drop],
"s" = private$posterior$s[types, iters, drop = drop],
"r" = private$posterior$r[types, sources, times, iters, drop = drop],
"lambda_i" = private$posterior$lambda_i[types, times, locations, iters, drop = drop],
"lambda_j" = private$posterior$lambda_j[sources, times, locations, iters, drop = drop],
"lambda_j_prop" = private$posterior$lambda_j_prop[sources, times, locations, iters, drop = drop],
stop("Unrecognised model component")
)
}))
} else {
if (isTRUE(flatten)) {
res <- do.call(cbind, lapply(params, function(x) {
switch(
x,
"alpha" = private$flatten_alpha(private$posterior$alpha[sources, times, locations, iters, drop = FALSE]),
"q" = private$flatten_q_s(private$posterior$q[types, iters, drop = FALSE], names = "q"),
"r" = private$flatten_r(private$posterior$r[types, sources, times, iters, drop = FALSE]),
"lambda_i" = private$flatten_lambda_i(private$posterior$lambda_i[types, times, locations, iters, drop = FALSE]),
"lambda_j" = private$flatten_lambda_j(private$posterior$lambda_j[sources, times, locations, iters, drop = FALSE]),
"lambda_j_prop" = private$flatten_lambda_j(private$posterior$lambda_j_prop[sources, times, locations, iters, drop = FALSE]),
"alpha" = private$flatten_alpha(res$alpha),
"q" = private$flatten_q_s(res$q),
"r" = private$flatten_r(res$r),
"lambda_i" = private$flatten_lambda_i(res$lambda_i),
"lambda_j" = private$flatten_lambda_j(res$lambda_j),
"lambda_j_prop" = private$flatten_lambda_j(res$lambda_j),
stop("Unrecognised model component")
)
}))
res <- as.data.frame(res)
if ("s" %in% params) {
tmp <-
as.data.frame(private$flatten_q_s(private$posterior$s[types, iters, drop = FALSE], names = "s"))
as.data.frame(private$flatten_q_s(res$s, names = "s"))
res <- cbind(res, tmp)
}
return(res)
}
res
},
plot_heatmap = function(iters,
cols = c("blue", "white"),
......@@ -1570,6 +1599,7 @@ HaldDP <- R6::R6Class(
{
object <-
self$extract(params, times, locations, sources, types, iters, flatten, drop = F)
if (flatten == TRUE) {
## do summary on each column of array
## remove cols starting with s_ as no summary for s objects
......
......@@ -37,3 +37,15 @@ arrayextend = function(x, along, size, newdimnames)
args[[length(args)+1]] = x
do.call('[<-',args)
}
#' Slice a tensorA::tensor
#'
#' Subsets a tensorA::tensor while preserving the dimnames.
#' This is a workaround for a buggy implementation of
#' [.tensor as of tensorA v0.36.
#'
#' @param x a tensorA::tensor to subset.
#' @param ... index arguments forwarded to the base array `[` method.
#' @return the sliced data, re-wrapped as a tensor via
#'   \code{as.tensor.default}.
sliceTensor <- function(x, ...) {
  # Demote to a plain array so base `[` dispatch is used instead of
  # the buggy tensorA [.tensor method.
  class(x) <- "array"
  sliced <- x[...]
  as.tensor.default(sliced)
}
......@@ -83,6 +83,7 @@ test_that("HaldDP model construction", {
model$update(n_iter=100)
expect_equal_to_reference(model$summary(), "haldDPres2.rds")
expect_equal_to_reference(model$extract('lambda_j'), "haldDPlambdaj.rds")
#expect_equal_to_reference(model$extract(flatten=TRUE), 'haldDPFlat.rds')
#model$fit_params(n_iter = 100,
# burn_in = 10,
# thin = 5)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment