Summarize Actigraphy Data
Usage
summarize_daily_actigraphy(
  x,
  unit = "1 min",
  fix_zeros = TRUE,
  fill_in = TRUE,
  trim = FALSE,
  verbose = TRUE,
  calculate_mims = FALSE,
  calculate_ac = FALSE,
  flag_data = TRUE,
  ensure_all_time = TRUE,
  flags = NULL,
  ...
)

summarise_daily_actigraphy(
  x,
  unit = "1 min",
  fix_zeros = TRUE,
  fill_in = TRUE,
  trim = FALSE,
  verbose = TRUE,
  calculate_mims = FALSE,
  calculate_ac = FALSE,
  flag_data = TRUE,
  ensure_all_time = TRUE,
  flags = NULL,
  ...
)

summarize_actigraphy(
  x,
  unit = "1 min",
  .fns = list(mean = mean, median = median),
  verbose = TRUE,
  ...
)

collapse_daily_actigraphy(
  x,
  .fns = list(
    mean = function(x) mean(x, na.rm = TRUE),
    median = function(x) median(x, na.rm = TRUE)
  ),
  verbose = TRUE
)

summarise_actigraphy(
  x,
  unit = "1 min",
  .fns = list(mean = mean, median = median),
  verbose = TRUE,
  ...
)
Arguments
- x
  an AccData object. If `x` is a character, then `read_actigraphy` will be run on it first.
- unit
  units to group the data into before taking the statistic (for example, "1 min").
- fix_zeros
  Should `fix_zeros` be run before calculating the measures?
- fill_in
  if `fix_zeros = TRUE`, should the zeros be filled in with the last observation carried forward?
- trim
  if `fix_zeros = TRUE`, should the time course be trimmed of zero values at the beginning and the end?
- verbose
  print diagnostic messages
- calculate_mims
  Should MIMS units be calculated? Passed to `calculate_measures`.
- calculate_ac
  Should Activity Counts from the activityCounts package be calculated?
- flag_data
  Should `SummarizedActigraphy::flag_qc()` be run? It will be executed after `fix_zeros` and before any measure calculation.
- ensure_all_time
  if `TRUE`, then all times from the first to the last will be in the output, even if data during that time was not in the input.
- flags
  the flags to calculate, passed to `SummarizedActigraphy::flag_qc()`
- ...
  Additional arguments to pass to `read_actigraphy`
- .fns
  Functions to apply to each of the selected columns. See `across`. A hedged sketch follows this list.
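A minimal sketch of how `x` and `.fns` interact, assuming only what the Usage and Arguments above state: `x` may be a file path (so `read_actigraphy` is run internally) and `.fns` is a named list of summary functions applied to each selected column. The "5 min" unit, the 90th-percentile summary, and the `path` object are illustrative choices, not package defaults.

# `path` is a .gt3x file path, as created in the Examples below
daily = summarize_daily_actigraphy(path, unit = "5 min")
avg = summarize_actigraphy(
  path,
  unit = "1 min",
  .fns = list(
    mean = function(x) mean(x, na.rm = TRUE),
    q90  = function(x) quantile(x, probs = 0.9, na.rm = TRUE)
  )
)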
Examples
path = system.file("extdata",
                   "TAS1H30182785_2019-09-17.gt3x",
                   package = "SummarizedActigraphy")
x = read_actigraphy(path)
#> Input is a .gt3x file, unzipping to a temporary location first...
#> Unzipping gt3x data to /tmp/RtmpsiCePd
#> 1/1
#> Unzipping /home/runner/work/_temp/Library/SummarizedActigraphy/extdata/TAS1H30182785_2019-09-17.gt3x
#> === info.txt, log.bin extracted to /tmp/RtmpsiCePd/TAS1H30182785_2019-09-17
#> GT3X information
#> $ Serial Number :"TAS1H30182785"
#> $ Device Type :"Link"
#> $ Firmware :"1.7.2"
#> $ Battery Voltage :"4.18"
#> $ Sample Rate :100
#> $ Start Date : POSIXct, format: "2019-09-17 18:40:00"
#> $ Stop Date : POSIXct, format: "2019-09-18 19:00:00"
#> $ Last Sample Time : POSIXct, format: "2019-09-17 19:20:05"
#> $ TimeZone :"-04:00:00"
#> $ Download Date : POSIXct, format: "2019-09-17 19:20:05"
#> $ Board Revision :"8"
#> $ Unexpected Resets :"0"
#> $ Acceleration Scale:256
#> $ Acceleration Min :"-8.0"
#> $ Acceleration Max :"8.0"
#> $ Subject Name :"suffix_85"
#> $ Serial Prefix :"TAS"
#> Parsing GT3X data via CPP.. expected sample size: 240500
#> ---GT3X PARAMETERS
#> address: 0 key: 6 value: 1
#> address: 0 key: 7 value: 54703161
#> address: 0 key: 8 value: 8
#> address: 0 key: 9 value: 1534154836
#> address: 0 key: 13 value: 17235970
#> address: 0 key: 16 value: 3791650816
#> address: 0 key: 20 value: 0
#> address: 0 key: 21 value: 0
#> address: 0 key: 22 value: 0
#> address: 0 key: 23 value: 0
#> address: 0 key: 26 value: 2
#> address: 0 key: 28 value: 262013
#> address: 0 key: 29 value: 255
#> address: 0 key: 32 value: 16908288
#> address: 0 key: 37 value: 1024
#> address: 0 key: 38 value: 0
#> address: 0 key: 49 value: 2048
#> address: 0 key: 50 value: 88181047
#> address: 0 key: 51 value: 6.82667
#> address: 0 key: 55 value: 256
#> address: 0 key: 57 value: 333.87
#> address: 0 key: 58 value: 21
#> address: 0 key: 61 value: 2
#> address: 1 key: 0 value: 0
#> address: 1 key: 1 value: 872668711
#> address: 1 key: 2 (features) value: 388
#> address: 1 key: 3 value: 1
#> address: 1 key: 4 value: 4294967131
#> address: 1 key: 5 value: 4294967095
#> address: 1 key: 6 value: 4294967149
#> address: 1 key: 7 value: 298
#> address: 1 key: 8 value: 286
#> address: 1 key: 9 value: 300
#> address: 1 key: 10 value: 100
#> address: 1 key: 12 (start time) value: 1568745600
#> address: 1 key: 13 value: 1568833200
#> address: 1 key: 14 value: 1568745556
#> address: 1 key: 15 value: 74
#> address: 1 key: 16 value: 40
#> address: 1 key: 17 value: 72
#> address: 1 key: 20 value: 0
#> address: 1 key: 21 value: 0
#> address: 1 key: 33 value: 60000
#> address: 1 key: 34 value: 4294965247
#> address: 1 key: 35 value: 4294965190
#> address: 1 key: 36 value: 4294965237
#> address: 1 key: 37 value: 2051
#> address: 1 key: 38 value: 2000
#> address: 1 key: 39 value: 2048
#> address: 1 key: 40 value: 0
#> address: 1 key: 41 value: 1
#> address: 1 key: 42 value: 0
#> address: 1 key: 43 value: 4294967283
#> address: 1 key: 44 value: 0
#> address: 1 key: 45 value: 0
#> address: 1 key: 46 value: 0
#> ---END PARAMETERS
#>
#> Activity with Sample Size of 0
#> payload start: 1568747741
#> total_records: 214100
#> max_samples: 240500
#> Activity with Sample Size of 0
#> payload start: 1568747759
#> total_records: 215900
#> max_samples: 240500
#> Total Records: 216000
#> Scaling...
#> Creating dimnames
#> CPP returning
#> Done (in 0.50767970085144 seconds)
options(digits.secs = 2)
fixed = fix_zeros(x)
daily = summarize_daily_actigraphy(fixed, fix_zeros = FALSE)
#> Flagging data
#> Flagging Spikes
#> Flagging Interval Jumps
#> Flagging Spikes at Second-level
#> Flagging Repeated Values
#> Flagging Device Limit Values
#> Flagging Zero Values
#> Flagging 'Impossible' Values
#> Calculating ai0
#> Calculating MAD
#> Joining AI and MAD
#> Joining flags
average_day = collapse_daily_actigraphy(daily)
#> Getting the First Day
#> Summarizing Data
if (FALSE) { # \dontrun{
  average_day = summarize_actigraphy(fixed, fix_zeros = FALSE)
  if (requireNamespace("ggplot2", quietly = TRUE)) {
    library(ggplot2)
    library(magrittr)
    average_day %>%
      ggplot(aes(x = time, y = ai_mean)) +
      geom_line()
    average_day %>%
      ggplot(aes(x = time, y = ai_median)) +
      geom_line()
  }
} # }
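A further hedged sketch, not run above: the same average-day workflow at a coarser resolution. The "10 min" unit, the base-graphics plot, and the assumption that the collapsed output carries the same `time` and `ai_mean` columns used in the ggplot2 example are illustrative, not guaranteed by the package.

# summarize the fixed data in 10-minute bins, then collapse across days
daily_10 = summarize_daily_actigraphy(fixed, unit = "10 min", fix_zeros = FALSE)
avg_10 = collapse_daily_actigraphy(daily_10)
# quick look at the mean activity index over the average day
plot(avg_10$time, avg_10$ai_mean, type = "l",
     xlab = "time of day", ylab = "AI (mean)")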