diff --git a/Project.toml b/Project.toml
index 1be561c..726f545 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
name = "Baytes"
uuid = "72ddfcfc-6e9d-43df-829b-7aed7c549d4f"
authors = ["Patrick Aschermayr"]
-version = "0.1.16"
+version = "0.2.0"
[deps]
ArgCheck = "dce04be8-c92d-5529-be00-80e4d2c0e197"
diff --git a/src/sampling/chain.jl b/src/sampling/chain.jl
index 2f7db78..ffa3bfb 100644
--- a/src/sampling/chain.jl
+++ b/src/sampling/chain.jl
@@ -142,7 +142,7 @@ function chainsummary(
)
## Assign utility values
@unpack Ndigits, quantiles = printdefault
- @unpack progress = trace.info
+ @unpack progress = trace.summary
@unpack tagged, paramnames = transform
Nparams = length(tagged)
Nchains = length(transform.chains)
diff --git a/src/sampling/diagnostics.jl b/src/sampling/diagnostics.jl
index 75a9329..886fff6 100644
--- a/src/sampling/diagnostics.jl
+++ b/src/sampling/diagnostics.jl
@@ -19,7 +19,7 @@ function printdiagnosticssummary(
## Assign utility variables
@unpack effective_iterations = transform
@unpack Ndigits, quantiles = printdefault
- @unpack Nchains, Nalgorithms, burnin = trace.info.sampling
+ @unpack Nchains, Nalgorithms, burnin = trace.summary.info
## Print diagnostics for each sampler for each chain
println(
"#####################################################################################",
diff --git a/src/sampling/inference.jl b/src/sampling/inference.jl
index 26b9b66..5610b10 100644
--- a/src/sampling/inference.jl
+++ b/src/sampling/inference.jl
@@ -103,13 +103,13 @@ end
function TraceTransform(
trace::Trace,
model::ModelWrapper,
- tagged::Tagged = Tagged(model, trace.info.sampling.printedparam.printed),
+ tagged::Tagged = Tagged(model, trace.summary.info.printedparam.printed),
info::TransformInfo = TransformInfo(
- collect(Base.OneTo(trace.info.sampling.Nchains)),
- collect(Base.OneTo(trace.info.sampling.Nalgorithms)),
- trace.info.sampling.burnin,
- trace.info.sampling.thinning,
- trace.info.sampling.iterations
+ collect(Base.OneTo(trace.summary.info.Nchains)),
+ collect(Base.OneTo(trace.summary.info.Nalgorithms)),
+ trace.summary.info.burnin,
+ trace.summary.info.thinning,
+ trace.summary.info.iterations
)
)
@unpack chains, algorithms, burnin, thinning, maxiterations = info
diff --git a/src/sampling/logging.jl b/src/sampling/logging.jl
index 01f5833..3e00850 100644
--- a/src/sampling/logging.jl
+++ b/src/sampling/logging.jl
@@ -7,7 +7,7 @@ Return `Progress` struct with arguments from `info` for sampling session.
```
"""
-function progress(report::ProgressReport, info::SamplingInfo)
+function progress(report::ProgressReport, info::SampleInfo)
return ProgressMeter.Progress(
info.iterations * info.Nalgorithms * info.Nchains;
enabled=report.bar,
diff --git a/src/sampling/sample.jl b/src/sampling/sample.jl
index facf863..956effb 100644
--- a/src/sampling/sample.jl
+++ b/src/sampling/sample.jl
@@ -44,9 +44,9 @@ function sample(
ArgCheck.@argcheck iterations > burnin "Burnin set higher than number of iterations."
## Check if we can capture previous samples
updatesampler = update(datatune, tempering.adaption, args...)
- ## Construct SamplingInfo and ProgressLog
+ ## Construct SampleInfo and ProgressLog
printedparameter = PrintedParameter(showparam(model, datatune, args...)...)
- info = SamplingInfo(printedparameter, iterations, burnin, thinning, length(args), chains, updatesampler, tempering.adaption)
+ info = SampleInfo(printedparameter, iterations, burnin, thinning, length(args), chains, updatesampler, tempering.adaption)
progressmeter = progress(report, info)
## Initialize algorithms
println("Constructing new sampler...")
@@ -56,7 +56,7 @@ function sample(
## Initialize trace
trace = Trace(
_rng, algorithmᵛ, model, BaytesCore.adjust(datatune, data),
- TraceInfo(tempertune, datatuneᵛ, default, info, progressmeter)
+ TraceSummary(tempertune, datatuneᵛ, default, info, progressmeter)
)
## Loop through iterations
println("Sampling starts...")
@@ -91,22 +91,22 @@ function sample!(iterations::Integer,
_rng::Random.AbstractRNG, model::M, data::D,
trace::Trace, algorithmᵛ
) where {M<:ModelWrapper,D}
- @unpack tempertune, datatune, sampling, default = trace.info
- @unpack Nalgorithms, Nchains, burnin, thinning, captured, tempered = sampling
+ @unpack tempertune, datatune, info, default = trace.summary
+ @unpack Nalgorithms, Nchains, burnin, thinning, captured, tempered = info
@unpack safeoutput, printoutput, printdefault, report = default
## Create new DataTune struct, taking into account current Index and data dimension
datatune_new = update(datatune, data)
## Check if iterations have to be adjusted if sequential data is used
iterations = maxiterations(datatune_new, iterations)
ArgCheck.@argcheck iterations > burnin "Burnin set higher than number of iterations."
- info = SamplingInfo(sampling.printedparam, iterations, burnin, thinning, Nalgorithms, Nchains, captured, tempered)
- progressmeter = progress(report, info)
+ info_new = SampleInfo(info.printedparam, iterations, burnin, thinning, Nalgorithms, Nchains, captured, tempered)
+ progressmeter = progress(report, info_new)
## Construct new models for algorithms
modelᵛ, datatuneᵛ = construct(model, datatune_new, Nchains, algorithmᵛ)
## Construct new trace to store new samples
trace_new = Trace(
_rng, algorithmᵛ, model, BaytesCore.adjust(datatune_new, data),
- TraceInfo(tempertune, datatuneᵛ, default, info, progressmeter)
+ TraceSummary(tempertune, datatuneᵛ, default, info_new, progressmeter)
)
## Loop through iterations
println("Sampling starts...")
diff --git a/src/sampling/trace.jl b/src/sampling/trace.jl
index 69bb537..2c4fd59 100644
--- a/src/sampling/trace.jl
+++ b/src/sampling/trace.jl
@@ -7,36 +7,36 @@ Contains useful information for post-sampling analysis. Also allows to continue
# Fields
$(TYPEDFIELDS)
"""
-struct TraceInfo{
+struct TraceSummary{
A<:TemperingMethod,
B<:Union{DataTune, Vector{<:DataTune}},
D<:BaytesCore.SampleDefault,
- S<:SamplingInfo
+ S<:SampleInfo
}
"Tuning container for temperature tempering"
tempertune::A
"Tuning container for data tempering"
datatune::B
- "Default information used for sample function"
+ "Default settings used for sample function"
default::D
"Information about trace used for postprocessing."
- sampling::S
+ info::S
"Progress Log while sampling."
progress::ProgressMeter.Progress
- function TraceInfo(
+ function TraceSummary(
tempertune::A,
datatune::B,
default::D,
- sampling::S,
+ info::S,
progress::ProgressMeter.Progress,
) where {
A<:TemperingMethod,
B<:Union{DataTune, Vector{<:DataTune}},
D<:BaytesCore.SampleDefault,
- S<:SamplingInfo
+ S<:SampleInfo
}
## Return info
- return new{A,B,D,S}(tempertune, datatune, default, sampling, progress)
+ return new{A,B,D,S}(tempertune, datatune, default, info, progress)
end
end
@@ -49,20 +49,20 @@ Contains sampling chain and diagnostics for given algorithms.
# Fields
$(TYPEDFIELDS)
"""
-struct Trace{C<:TraceInfo, A<:NamedTuple,B}
+struct Trace{C<:TraceSummary, A<:NamedTuple,B}
"Model samples ~ out vector for corresponding chain, inner vector for iteration"
val::Vector{Vector{A}}
"Algorithm diagnostics ~ out vector for corresponding chain, inner vector for iteration"
diagnostics::Vector{B}
"Information about trace used for postprocessing."
- info::C
+ summary::C
function Trace(
val::Vector{Vector{A}},
diagnostics::Vector{B},
- info::C,
- ) where {A,B,C<:TraceInfo}
+ summary::C,
+ ) where {A,B,C<:TraceSummary}
## Return trace
- return new{C,A,B}(val, diagnostics, info)
+ return new{C,A,B}(val, diagnostics, summary)
end
end
@@ -71,16 +71,16 @@ function Trace(
algorithmᵛ::A,
model::ModelWrapper,
data::D,
- info::TraceInfo,
+ summary::TraceSummary,
) where {A,D}
- @unpack iterations, Nchains = info.sampling
+ @unpack iterations, Nchains = summary.info
## Create Model Parameter buffer
val = [Vector{typeof(model.val)}(undef, iterations) for _ in Base.OneTo(Nchains)]
## Create Diagnostics buffer for each algorithm used
diagtypes = infer(_rng, AbstractDiagnostics, algorithmᵛ, model, data)
diagnostics = diagnosticsbuffer(diagtypes, iterations, Nchains, algorithmᵛ)
## Return trace
- return Trace(val, diagnostics, info)
+ return Trace(val, diagnostics, summary)
end
############################################################################################
@@ -98,13 +98,13 @@ Note that smc still works as intended if used alongside other mcmc sampler in `a
"""
function propose!(
_rng::Random.AbstractRNG,
- trace::Trace{<:TraceInfo{<:BaytesCore.IterationTempering}},
+ trace::Trace{<:TraceSummary{<:BaytesCore.IterationTempering}},
algorithmᵛ::AbstractVector,
modelᵛ::Vector{M},
data::D,
) where {M<:ModelWrapper,D}
- @unpack default, tempertune, datatune, sampling, progress = trace.info
- @unpack iterations, Nchains, Nalgorithms, captured = sampling
+ @unpack default, tempertune, datatune, info, progress = trace.summary
+ @unpack iterations, Nchains, Nalgorithms, captured = info
@unpack log = default.report
## Propagate through data
Base.Threads.@threads for Nchain in Base.OneTo(Nchains)
@@ -136,13 +136,13 @@ end
function propose!(
_rng::Random.AbstractRNG,
- trace::Trace{<:TraceInfo{<:BaytesCore.JointTempering}},
+ trace::Trace{<:TraceSummary{<:BaytesCore.JointTempering}},
algorithmᵛ::AbstractVector,
modelᵛ::Vector{M},
data::D,
) where {M<:ModelWrapper,D}
- @unpack default, tempertune, datatune, sampling, progress = trace.info
- @unpack iterations, Nchains, Nalgorithms, captured = sampling
+ @unpack default, tempertune, datatune, info, progress = trace.summary
+ @unpack iterations, Nchains, Nalgorithms, captured = info
@unpack log = default.report
## Compute initial temperature
temperature = BaytesCore.initial(tempertune)
@@ -179,8 +179,8 @@ function propose!(
modelᵛ::M,
data::D,
) where {T<:Trace,M<:ModelWrapper,D}
- @unpack default, tempertune, datatune, sampling, progress = trace.info
- @unpack iterations, Nchains, Nalgorithms, captured = sampling
+ @unpack default, tempertune, datatune, info, progress = trace.summary
+ @unpack iterations, Nchains, Nalgorithms, captured = info
@unpack log = default.report
## Compute initial temperature
temperature = BaytesCore.initial(tempertune)
@@ -257,11 +257,11 @@ function savetrace(trace::Trace, model::ModelWrapper, algorithm,
"M",
Dates.minute(Dates.now()),
"_Nchains",
- trace.info.sampling.Nchains,
+ trace.summary.info.Nchains,
"_Iter",
- trace.info.sampling.iterations,
+ trace.summary.info.iterations,
"_Burnin",
- trace.info.sampling.burnin,
+ trace.summary.info.burnin,
))
)
JLD2.jldsave(
@@ -275,4 +275,4 @@ end
############################################################################################
#export
-export TraceInfo, Trace, propose!, savetrace
+export TraceSummary, Trace, propose!, savetrace
diff --git a/src/sampling/utility.jl b/src/sampling/utility.jl
index 35b037b..d98527c 100644
--- a/src/sampling/utility.jl
+++ b/src/sampling/utility.jl
@@ -29,7 +29,7 @@ Contains several useful information for constructing sampler.
# Fields
$(TYPEDFIELDS)
"""
-struct SamplingInfo{A<:PrintedParameter, U<:BaytesCore.UpdateBool, B<:BaytesCore.UpdateBool}
+struct SampleInfo{A<:PrintedParameter, U<:BaytesCore.UpdateBool, B<:BaytesCore.UpdateBool}
"Parameter settings for printing."
printedparam::A
"Total number of sampling iterations."
@@ -46,7 +46,7 @@ struct SamplingInfo{A<:PrintedParameter, U<:BaytesCore.UpdateBool, B<:BaytesCore
captured::U
"Boolean if temperature is adapted for target function."
tempered::B
- function SamplingInfo(
+ function SampleInfo(
printedparam::A,
iterations::Int64,
burnin::Int64,
@@ -214,4 +214,4 @@ end
############################################################################################
#export
-export SamplingInfo, update, infer
+export SampleInfo, update, infer
diff --git a/test/test-construction.jl b/test/test-construction.jl
index 40c44a1..adac83f 100644
--- a/test/test-construction.jl
+++ b/test/test-construction.jl
@@ -44,10 +44,10 @@ tempermethod = tempermethods[iter]
mcmc = MCMC(NUTS,(:μ, :σ,); stepsize = ConfigStepsize(;stepsizeadaption = UpdateFalse()))
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc ; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace.summary.info.captured, typeof(temperupdate))
## Continue sampling
trace2, algorithms2 = sample!(100, _rng, _obj.model, _obj.data, trace, algorithms)
- @test isa(trace2.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace2.summary.info.captured, typeof(temperupdate))
## Inference Section
transform = Baytes.TraceTransform(trace, _obj.model)
postmean = trace_to_posteriormean(trace, transform)
@@ -74,19 +74,19 @@ tempermethod = tempermethods[iter]
ibis = SMCConstructor(mcmc, SMCDefault(jitterthreshold=0.99, resamplingthreshold=1.0))
trace, algorithms = sample(_rng, _obj.model, _obj.data, ibis; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateFalse)
+ @test isa(trace.summary.info.captured, UpdateFalse)
## Continue sampling
newdat = randn(_rng, length(_obj.data)+100)
trace2, algorithms2 = sample!(100, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateFalse)
+ @test isa(trace2.summary.info.captured, UpdateFalse)
# Combinations
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc, ibis; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateTrue)
+ @test isa(trace.summary.info.captured, UpdateTrue)
## Continue sampling
newdat = randn(_rng, length(_obj.data)+100)
trace2, algorithms2 = sample!(100, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateTrue)
+ @test isa(trace2.summary.info.captured, UpdateTrue)
end
end
end
@@ -125,18 +125,18 @@ temperchainmethods = [
mcmc = MCMC(NUTS,(:μ, :σ,); stepsize = ConfigStepsize(;stepsizeadaption = UpdateFalse()))
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc ; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace.summary.info.captured, typeof(temperupdate))
## Continue sampling
trace2, algorithms2 = sample!(100, _rng, _obj.model, _obj.data, trace, algorithms)
- @test isa(trace2.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace2.summary.info.captured, typeof(temperupdate))
# Combinations
mcmc = MCMC(NUTS,(:μ, :σ,); stepsize = ConfigStepsize(;stepsizeadaption = UpdateFalse()))
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc, mcmc ; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace.summary.info.captured, typeof(temperupdate))
## Continue sampling
trace2, algorithms2 = sample!(100, _rng, _obj.model, _obj.data, trace, algorithms)
- @test isa(trace2.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace2.summary.info.captured, typeof(temperupdate))
end
end
@@ -167,27 +167,27 @@ end
mcmc = MCMC(NUTS,(:μ, :σ,); stepsize = ConfigStepsize(;stepsizeadaption = UpdateFalse()))
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc ; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace.summary.info.captured, typeof(temperupdate))
## Continue sampling
trace2, algorithms2 = sample!(100, _rng, _obj.model, _obj.data, trace, algorithms)
- @test isa(trace2.info.sampling.captured, typeof(temperupdate))
+ @test isa(trace2.summary.info.captured, typeof(temperupdate))
#SMC
ibis = SMCConstructor(mcmc, SMCDefault(jitterthreshold=0.99, resamplingthreshold=1.0))
trace, algorithms = sample(_rng, _obj.model, _obj.data, ibis; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateFalse)
+ @test isa(trace.summary.info.captured, UpdateFalse)
## Continue sampling
newdat = randn(_rng, length(_obj.data)+100)
trace2, algorithms2 = sample!(100, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateFalse)
+ @test isa(trace2.summary.info.captured, UpdateFalse)
# Combinations
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc, ibis; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateTrue)
+ @test isa(trace.summary.info.captured, UpdateTrue)
## Continue sampling
newdat = randn(_rng, length(_obj.data)+100)
trace2, algorithms2 = sample!(100, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateTrue)
+ @test isa(trace2.summary.info.captured, UpdateTrue)
end
end
@@ -221,21 +221,21 @@ end
trace, algorithms = sample(_rng, _obj.model, _obj.data, mcmc ; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
#!NOTE: If Expanding/Increasing data, update always true
- @test isa(trace.info.sampling.captured, UpdateTrue)
+ @test isa(trace.summary.info.captured, UpdateTrue)
## Continue sampling
trace2, algorithms2 = sample!(500, _rng, _obj.model, _obj.data, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateTrue)
+ @test isa(trace2.summary.info.captured, UpdateTrue)
#SMC
ibis = SMCConstructor(mcmc, SMCDefault(jitterthreshold=0.99, resamplingthreshold=1.0))
trace, algorithms = sample(_rng, _obj.model, _obj.data, ibis; default = deepcopy(sampledefault))
# transform = TraceTransform(trace, _obj.model)
# summary(trace, algorithms,transform,PrintDefault(),)
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateFalse)
+ @test isa(trace.summary.info.captured, UpdateFalse)
## Continue sampling
newdat = randn(_rng, length(_obj.data)+500)
trace2, algorithms2 = sample!(500, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateFalse)
+ @test isa(trace2.summary.info.captured, UpdateFalse)
# SMC2
_obj = deepcopy(myobjective_mcmc)
_tagged_pf = myobjective_pf.tagged
@@ -251,15 +251,15 @@ end
smc2 = SMCConstructor(SMC2Constructor(pf, pmcmc), SMCDefault(jitterthreshold=0.75, resamplingthreshold=0.75))
trace, algorithms = sample(_rng, _obj.model, data, smc2; default = deepcopy(sampledefault))
## If single mcmc kernel assigned, can capture previous results
- @test isa(trace.info.sampling.captured, UpdateFalse)
+ @test isa(trace.summary.info.captured, UpdateFalse)
#Check if correct parameter are printed
- allparam, printparam = Baytes.showparam(_obj.model, trace.info.datatune, smc2)
+ allparam, printparam = Baytes.showparam(_obj.model, trace.summary.datatune, smc2)
@test allparam == keys(_obj.model.val)
@test printparam == (:μ, :σ, :p)
## Continue sampling
newdat = randn(_rng, length(data)+500)
trace2, algorithms2 = sample!(500, _rng, _obj.model, newdat, trace, algorithms)
- @test isa(trace2.info.sampling.captured, UpdateFalse)
+ @test isa(trace2.summary.info.captured, UpdateFalse)
end
end