Use Julia v1.6 syntax simplifications (#171)
* Use Julia 1.6 simplified keyword syntax

* Increment patch version number

* Require at least one argument

* Don't commit log files
sethaxen authored Mar 23, 2022
1 parent 397bbeb commit d69f11c
Showing 18 changed files with 95 additions and 160 deletions.
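Most of the changes below apply the keyword-argument shorthand named in the commit message: when a local variable has the same name as a keyword argument, `f(; x)` passes it without spelling out `x=x`. A minimal sketch of the two call forms, using a hypothetical function rather than anything from the diff:

```julia
# Hypothetical function; both calls below return "tau = 3.5".
describe(; name, value) = "$name = $value"

name = "tau"
value = 3.5

describe(; name=name, value=value)  # explicit form used before this commit
describe(; name, value)             # shorthand: each keyword picks up the local variable of the same name
```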
2 changes: 2 additions & 0 deletions .gitignore
@@ -4,5 +4,7 @@
/deps/deps.jl
/docs/src/assets/logo*.png
/docs/src/assets/favicon.ico
+/docs/src/*.log
/docs/build
+/test/*.log
.DS_Store
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ArviZ"
uuid = "131c737c-5715-5e2e-ad31-c244f01c1dc7"
authors = ["Seth Axen <[email protected]>"]
-version = "0.5.15"
+version = "0.5.16"

[deps]
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
12 changes: 6 additions & 6 deletions docs/src/mpl_examples.md
@@ -422,7 +422,7 @@ using ArviZ
ArviZ.use_style("arviz-darkgrid")
idata = load_arviz_data("non_centered_eight")
-plot_loo_pit(; idata=idata, y="obs", color="indigo")
+plot_loo_pit(; idata, y="obs", color="indigo")
gcf()
```
@@ -475,7 +475,7 @@ ArviZ.use_style("arviz-darkgrid")
centered = load_arviz_data("centered_eight")
coords = Dict("school" => ["Choate", "Deerfield"])
plot_pair(
-centered; var_names=["theta", "mu", "tau"], coords=coords, divergences=true, textsize=22
+centered; var_names=["theta", "mu", "tau"], coords, divergences=true, textsize=22
)
gcf()
@@ -498,7 +498,7 @@ plot_pair(
centered;
var_names=["theta", "mu", "tau"],
kind="hexbin",
-coords=coords,
+coords,
colorbar=true,
divergences=true,
)
@@ -523,7 +523,7 @@ plot_pair(
centered;
var_names=["theta", "mu", "tau"],
kind="kde",
-coords=coords,
+coords,
divergences=true,
textsize=22,
)
@@ -550,7 +550,7 @@ plot_pair(
kind=["scatter", "kde"],
kde_kwargs=Dict("fill_last" => false),
marginals=true,
-coords=coords,
+coords,
point_estimate="median",
figsize=(10, 8),
)
@@ -590,7 +590,7 @@ ArviZ.use_style("arviz-darkgrid")
data = load_arviz_data("centered_eight")
coords = Dict("school" => ["Choate"])
-plot_posterior(data; var_names=["mu", "theta"], coords=coords, rope=(-1, 1))
+plot_posterior(data; var_names=["mu", "theta"], coords, rope=(-1, 1))
gcf()
```
2 changes: 1 addition & 1 deletion src/data.jl
@@ -115,7 +115,7 @@ function _from_dict(posterior=nothing; attrs=Dict(), coords=nothing, dims=nothin
datasets = []
for (name, dict) in pairs(dicts)
(dict === nothing || isempty(dict)) && continue
-dataset = dict_to_dataset(dict; attrs=attrs, coords=coords, dims=dims)
+dataset = dict_to_dataset(dict; attrs, coords, dims)
push!(datasets, name => dataset)
end

8 changes: 4 additions & 4 deletions src/dataset.jl
@@ -86,7 +86,7 @@ convert_to_dataset

function convert_to_dataset(obj; group=:posterior, kwargs...)
group = Symbol(group)
-idata = convert_to_inference_data(obj; group=group, kwargs...)
+idata = convert_to_inference_data(obj; group, kwargs...)
dataset = getproperty(idata, group)
return dataset
end
@@ -126,7 +126,7 @@ function convert_to_constant_dataset(
vals = _asarray(vals)
val_dims = get(dims, key, nothing)
(val_dims, val_coords) = base.generate_dims_coords(
-size(vals), key; dims=val_dims, coords=coords
+size(vals), key; dims=val_dims, coords
)
data[key] = xarray.DataArray(vals; dims=val_dims, coords=val_coords)
end
@@ -136,7 +136,7 @@ function convert_to_constant_dataset(
default_attrs = merge(default_attrs, Dict("inference_library" => string(library)))
end
attrs = merge(default_attrs, attrs)
-return Dataset(; data_vars=data, coords=coords, attrs=attrs)
+return Dataset(; data_vars=data, coords, attrs)
end

@doc doc"""
@@ -167,7 +167,7 @@ function dict_to_dataset(data; library=nothing, attrs=Dict(), kwargs...)
if library !== nothing
attrs = merge(attrs, Dict("inference_library" => string(library)))
end
-return arviz.dict_to_dataset(data; attrs=attrs, kwargs...)
+return arviz.dict_to_dataset(data; attrs, kwargs...)
end

@doc doc"""
18 changes: 7 additions & 11 deletions src/mcmcchains.jl
@@ -177,7 +177,7 @@ function from_mcmcchains(
stats_dict = nothing
else
post_dict = convert_to_eltypes(chains_to_dict(posterior), eltypes)
-stats_dict = chains_to_dict(posterior; section=:internals, rekey_fun=rekey_fun)
+stats_dict = chains_to_dict(posterior; section=:internals, rekey_fun)
stats_dict = enforce_stat_eltypes(stats_dict)
stats_dict = convert_to_eltypes(stats_dict, Dict("is_accept" => Bool))
end
@@ -199,9 +199,9 @@ function from_mcmcchains(
group_data = popsubdict!(post_dict, group_data)
end
group_dataset = if group_data isa Chains
-convert_to_dataset(group_data; library=library, eltypes=eltypes, kwargs...)
+convert_to_dataset(group_data; library, eltypes, kwargs...)
else
-convert_to_dataset(group_data; library=library, kwargs...)
+convert_to_dataset(group_data; library, kwargs...)
end
setattribute!(group_dataset, "inference_library", library)
concat!(all_idata, InferenceData(; group => group_dataset))
@@ -213,7 +213,7 @@
else
attrs = merge(attributes_dict(posterior), attrs_library)
end
-kwargs = Dict(pairs(merge((; attrs=attrs, dims=Dict()), kwargs)))
+kwargs = Dict(pairs(merge((; attrs, dims=Dict()), kwargs)))
post_idata = _from_dict(post_dict; sample_stats=stats_dict, kwargs...)
concat!(all_idata, post_idata)
return all_idata
@@ -246,11 +246,7 @@ function from_mcmcchains(

if prior !== nothing
pre_prior_idata = convert_to_inference_data(
-prior;
-posterior_predictive=prior_predictive,
-library=library,
-eltypes=eltypes,
-kwargs...,
+prior; posterior_predictive=prior_predictive, library, eltypes, kwargs...
)
prior_idata = rekey(
pre_prior_idata,
@@ -263,7 +259,7 @@
concat!(all_idata, prior_idata)
elseif prior_predictive !== nothing
pre_prior_predictive_idata = convert_to_inference_data(
-prior_predictive; eltypes=eltypes, kwargs...
+prior_predictive; eltypes, kwargs...
)
concat!(
all_idata,
@@ -278,7 +274,7 @@
]
group_data === nothing && continue
group_data = convert_to_eltypes(group_data, eltypes)
-group_dataset = convert_to_constant_dataset(group_data; library=library, kwargs...)
+group_dataset = convert_to_constant_dataset(group_data; library, kwargs...)
concat!(all_idata, InferenceData(; group => group_dataset))
end

4 changes: 2 additions & 2 deletions src/namedtuple.jl
@@ -186,7 +186,7 @@ function from_namedtuple(
prior;
posterior_predictive=prior_predictive,
sample_stats=sample_stats_prior,
-library=library,
+library,
kwargs...,
)
prior_idata = rekey(
@@ -207,7 +207,7 @@
]
group_data === nothing && continue
group_dict = Dict(pairs(group_data))
-group_dataset = convert_to_constant_dataset(group_dict; library=library, kwargs...)
+group_dataset = convert_to_constant_dataset(group_dict; library, kwargs...)
concat!(all_idata, InferenceData(; group => group_dataset))
end

6 changes: 3 additions & 3 deletions src/plots.jl
@@ -78,7 +78,7 @@ for f in (:plot_density, :plot_forest, :plot_rank)
::typeof($(f)), data, args...; transform=identity, group=:posterior, kwargs...
)
tdata = transform(data)
-dataset = convert_to_dataset(tdata; group=group)
+dataset = convert_to_dataset(tdata; group)
return tuple(dataset, args...), kwargs
end
end
@@ -96,7 +96,7 @@ for f in (:plot_density, :plot_forest)
)
tdata = transform(data)
datasets = map(tdata) do datum
-return convert_to_dataset(datum; group=group)
+return convert_to_dataset(datum; group)
end
return tuple(datasets, args...), kwargs
end
@@ -109,7 +109,7 @@
kwargs...,
)
tdata = transform(data)
-dataset = convert_to_dataset(tdata; group=group)
+dataset = convert_to_dataset(tdata; group)
return tuple(dataset, args...), kwargs
end
end
13 changes: 5 additions & 8 deletions src/samplechains.jl
@@ -20,7 +20,9 @@ _maybe_multichain(x::SampleChains.AbstractChain) = SampleChains.MultiChain(x)
function _maybe_multichain(x::AbstractVector{<:SampleChains.AbstractChain})
return SampleChains.MultiChain(x)
end
-function _maybe_multichain(x::Tuple{Vararg{<:SampleChains.AbstractChain}})
+function _maybe_multichain(
+x::Tuple{<:SampleChains.AbstractChain,Vararg{<:SampleChains.AbstractChain}}
+)
return SampleChains.MultiChain(x...)
end

@@ -88,12 +90,7 @@ function from_samplechains(
sample_stats_prior = _samplechains_info(prior_mc)
end
return from_namedtuple(
-posterior_mc;
-prior=prior_mc,
-sample_stats=sample_stats,
-sample_stats_prior=sample_stats_prior,
-library=library,
-kwargs...,
+posterior_mc; prior=prior_mc, sample_stats, sample_stats_prior, library, kwargs...
)
end

@@ -120,7 +117,7 @@ function convert_to_inference_data(
SampleChains.AbstractChain,
SampleChains.MultiChain,
AbstractVector{<:SampleChains.AbstractChain},
-Tuple{Vararg{<:SampleChains.AbstractChain}},
+Tuple{<:SampleChains.AbstractChain,Vararg{<:SampleChains.AbstractChain}},
},
}
group = Symbol(group)
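The "Require at least one argument" item in the commit message corresponds to the two signature changes above: a bare `Tuple{Vararg{...}}` also matches the empty tuple, whereas the new form demands at least one chain. A minimal sketch of the difference, with hypothetical method names not taken from the package:

```julia
# Vararg alone admits the empty tuple; a leading element makes one argument mandatory.
at_least_zero(::Tuple{Vararg{Int}}) = "zero or more Ints"
at_least_one(::Tuple{Int,Vararg{Int}}) = "one or more Ints"

at_least_zero(())        # accepted: the empty tuple matches Vararg alone
at_least_one((1, 2, 3))  # accepted: one or more elements
# at_least_one(())       # MethodError: no leading Int, so the empty tuple is rejected
```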
8 changes: 4 additions & 4 deletions src/stats.jl
@@ -46,7 +46,7 @@ function psislw(logw, reff=1)
log_weights = result.log_weights
d = ndims(log_weights)
dims = d == 1 ? Colon() : ntuple(Base.Fix1(+, 1), d - 1)
-log_norm_exp = logsumexp(log_weights; dims=dims)
+log_norm_exp = logsumexp(log_weights; dims)
log_weights .-= log_norm_exp
return log_weights, result.pareto_shape
end
@@ -143,10 +143,10 @@ function StatsBase.summarystats(data::InferenceData; group=:posterior, kwargs...
return summarystats(dataset; kwargs...)
end
function StatsBase.summarystats(data::Dataset; fmt=:wide, kwargs...)
-s = arviz.summary(data; fmt=fmt, kwargs...)
+s = arviz.summary(data; fmt, kwargs...)
s isa Dataset && return s
index_name = Symbol(fmt) == :long ? :statistic : :variable
-return todataframes(s; index_name=index_name)
+return todataframes(s; index_name)
end

"""
@@ -168,6 +168,6 @@ Compute summary statistics on any object that can be passed to [`convert_to_data
- `kwargs`: Keyword arguments passed to [`summarystats`](@ref).
"""
function summary(data; group=:posterior, coords=nothing, dims=nothing, kwargs...)
-dataset = convert_to_dataset(data; group=group, coords=coords, dims=dims)
+dataset = convert_to_dataset(data; group, coords, dims)
return summarystats(dataset; kwargs...)
end
6 changes: 3 additions & 3 deletions src/utils.jl
@@ -142,7 +142,7 @@ macro forwardplotfun(f)
backend_val = Val(backend)
load_backend(backend_val)
args, kwargs = convert_arguments($(f), args...; kwargs...)
-result = arviz.$(f)(args...; kwargs..., backend=backend)
+result = arviz.$(f)(args...; kwargs..., backend)
return convert_result($(f), result, backend_val)
end

@@ -280,13 +280,13 @@ function topandas(::Val{:Series}, df)
initialize_pandas()
df = DataFrames.DataFrame(df)
rownames = names(df)
-colvals = Array(first(eachrow(df)))
+colvals = Array(only(eachrow(df)))
return pandas.Series(colvals, rownames)
end
function topandas(::Val{:ELPDData}, df)
initialize_pandas()
df = DataFrames.DataFrame(df)
rownames = names(df)
-colvals = Array(first(eachrow(df)))
+colvals = Array(only(eachrow(df)))
return ArviZ.arviz.stats.ELPDData(colvals, rownames)
end
12 changes: 6 additions & 6 deletions test/helpers.jl
@@ -49,12 +49,12 @@ function create_model(seed=10)
"diverging" => Int.(randn(rng, nchains, ndraws) .> 0.95),
)
model = from_dict(;
-posterior=posterior,
-posterior_predictive=posterior_predictive,
-sample_stats=sample_stats,
-prior=prior,
-prior_predictive=prior_predictive,
-sample_stats_prior=sample_stats_prior,
+posterior,
+posterior_predictive,
+sample_stats,
+prior,
+prior_predictive,
+sample_stats_prior,
observed_data=Dict("y" => data["y"]),
dims=Dict("y" => ["obs_dim"], "log_likelihood" => ["obs_dim"]),
coords=Dict("obs_dim" => 1:J),
4 changes: 2 additions & 2 deletions test/test_data.jl
@@ -162,12 +162,12 @@ end
posterior = Dict("A" => randn(rng, 2, 10, 2), "B" => randn(rng, 2, 10, 5, 2))
prior = Dict("C" => randn(rng, 2, 10, 2), "D" => randn(rng, 2, 10, 5, 2))

-idata = from_dict(posterior; prior=prior)
+idata = from_dict(posterior; prior)
@test check_multiple_attrs(
Dict(:posterior => ["A", "B"], :prior => ["C", "D"]), idata
) == []

-idata2 = from_dict(; prior=prior)
+idata2 = from_dict(; prior)
@test check_multiple_attrs(Dict(:prior => ["C", "D"]), idata2) == []
end

14 changes: 5 additions & 9 deletions test/test_dataset.jl
@@ -16,8 +16,8 @@
"dimx" => [1, 2, 3], "dimy_1" => ["a", "b", "c"], "dimy_2" => ["d", "e"]
)
attrs = Dict("prop1" => 1, "prop2" => "propval")
-@inferred ArviZ.Dataset(; data_vars=vars, coords=coords, attrs=attrs)
-ds = ArviZ.Dataset(; data_vars=vars, coords=coords, attrs=attrs)
+@inferred ArviZ.Dataset(; data_vars=vars, coords, attrs)
+ds = ArviZ.Dataset(; data_vars=vars, coords, attrs)
@test ds isa ArviZ.Dataset
vars2, kwargs = ArviZ.dataset_to_dict(ds)
for (k, v) in vars
@@ -113,9 +113,7 @@ end
dataset = ArviZ.convert_to_constant_dataset(data)
attrs = Dict("prop" => "propval")

-dataset = ArviZ.convert_to_constant_dataset(
-data; coords=coords, dims=dims, library=library, attrs=attrs
-)
+dataset = ArviZ.convert_to_constant_dataset(data; coords, dims, library, attrs)
@test dataset isa ArviZ.Dataset
@test "x" dataset.keys()
@test "y" dataset.keys()
@@ -139,9 +137,7 @@ end
dataset = ArviZ.convert_to_constant_dataset(data)
attrs = (prop="propval",)

-dataset = ArviZ.convert_to_constant_dataset(
-data; coords=coords, dims=dims, library=library, attrs=attrs
-)
+dataset = ArviZ.convert_to_constant_dataset(data; coords, dims, library, attrs)
@test dataset isa ArviZ.Dataset
@test "x" dataset.keys()
@test "y" dataset.keys()
@@ -171,7 +167,7 @@ end
dims = Dict("b" => ["bi", "bj"])
attrs = Dict("mykey" => 5)

-ds = ArviZ.dict_to_dataset(vars; library=:MyLib, coords=coords, dims=dims, attrs=attrs)
+ds = ArviZ.dict_to_dataset(vars; library=:MyLib, coords, dims, attrs)
@test ds isa ArviZ.Dataset
vars2, kwargs = ArviZ.dataset_to_dict(ds)
for (k, v) in vars

2 comments on commit d69f11c

@sethaxen (Member Author)

@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/57173

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v0.5.16 -m "<description of version>" d69f11caa64e3eb81a6822c54bdd47c30e9f6bdf
git push origin v0.5.16
