Merge pull request #181 from NREL-SIIP/jd/deps_bump
deps bump
jd-lara authored Dec 7, 2020
2 parents 24e0248 + b869a50 commit ff7b5fa
Showing 2 changed files with 30 additions and 30 deletions.
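
The substance of this bump is the HDF5.jl 0.13 → 0.14 API migration: every hunk in `src/hdf5_time_series_storage.jl` below applies one of a small set of renames. As a reading aid (a summary drawn from the hunks themselves, not part of the commit):

```julia
# HDF5.jl rename map applied throughout this diff (0.13 -> 0.14):
#
#   HDF5.exists(parent, name)    -> haskey(parent, name)
#   HDF5.g_create(parent, name)  -> HDF5.create_group(parent, name)
#   HDF5.attrs(obj)              -> HDF5.attributes(obj)
#   HDF5.o_open(parent, name)    -> HDF5.open_object(parent, name)
#   HDF5.o_delete(parent, name)  -> HDF5.delete_object(parent, name)  # now returns nothing
#   HDF5.HDF5Group               -> HDF5.Group
```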
4 changes: 2 additions & 2 deletions Project.toml
@@ -27,10 +27,10 @@ CSV = "~0.8"
 DataFrames = "~0.22"
 DataStructures = "~0.18"
 DocStringExtensions = "~0.8"
-HDF5 = "~0.13"
+HDF5 = "~0.14"
 JSON3 = "1"
 Mustache = "~1.0"
 PrettyTables = "~0.10"
-TimeSeries = "~0.19"
+TimeSeries = "~0.20"
 YAML = "~0.4"
 julia = "^1.2"
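
The `~` and `^` specifiers above follow Pkg's semver compat rules. A quick illustrative sketch of what they admit, using Pkg's `semver_spec` helper (an internal API, so subject to change; shown only to explain why the bump forces code changes):

```julia
using Pkg

# "~0.14" admits >= 0.14.0 and < 0.15.0, which is why the HDF5 API renames
# in the next file were required before this compat entry could be bumped.
hdf5 = Pkg.Types.semver_spec("~0.14")
@show v"0.14.3" in hdf5   # true
@show v"0.13.7" in hdf5   # false

# "^1.2" admits >= 1.2.0 and < 2.0.0 (caret is also the default with no prefix).
julia_compat = Pkg.Types.semver_spec("^1.2")
@show v"1.5.3" in julia_compat   # true
```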
56 changes: 28 additions & 28 deletions src/hdf5_time_series_storage.jl
@@ -97,8 +97,8 @@ function serialize_time_series!(
 
     HDF5.h5open(storage.file_path, "r+") do file
         root = _get_root(storage, file)
-        if !HDF5.exists(root, uuid)
-            HDF5.g_create(root, uuid)
+        if !haskey(root, uuid)
+            HDF5.create_group(root, uuid)
             path = root[uuid]
             data = get_array_for_hdf(ts)
             path["data"] = data
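
A self-contained sketch of the new existence-check/creation idiom used in this hunk (temporary file and random payload are illustrative only):

```julia
using HDF5, UUIDs

HDF5.h5open(tempname() * ".h5", "w") do file
    uuid = string(uuid4())
    if !haskey(file, uuid)             # 0.13: HDF5.exists(file, uuid)
        HDF5.create_group(file, uuid)  # 0.13: HDF5.g_create(file, uuid)
        path = file[uuid]
        path["data"] = rand(24)        # datasets are created by assignment
    end
end
```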
@@ -150,18 +150,18 @@ function _write_time_series_attributes!(
 ) where {T <: Forecast}
     _write_time_series_attributes_common!(storage, ts, path)
     interval = get_interval(ts)
-    HDF5.attrs(path)["interval"] = time_period_conversion(interval).value
+    HDF5.attributes(path)["interval"] = time_period_conversion(interval).value
 end
 
 function _write_time_series_attributes_common!(storage::Hdf5TimeSeriesStorage, ts, path)
     initial_timestamp = Dates.datetime2epochms(get_initial_timestamp(ts))
     resolution = get_resolution(ts)
     data_type = get_data_type(ts)
-    HDF5.attrs(path)["module"] = string(parentmodule(typeof(ts)))
-    HDF5.attrs(path)["type"] = string(nameof(typeof(ts)))
-    HDF5.attrs(path)["initial_timestamp"] = initial_timestamp
-    HDF5.attrs(path)["resolution"] = time_period_conversion(resolution).value
-    HDF5.attrs(path)["data_type"] = data_type
+    HDF5.attributes(path)["module"] = string(parentmodule(typeof(ts)))
+    HDF5.attributes(path)["type"] = string(nameof(typeof(ts)))
+    HDF5.attributes(path)["initial_timestamp"] = initial_timestamp
+    HDF5.attributes(path)["resolution"] = time_period_conversion(resolution).value
+    HDF5.attributes(path)["data_type"] = data_type
 end
 
 function _read_time_series_attributes(
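
The attribute-write rename from the hunk above as a runnable sketch (file, group, and values are made up; the `HDF5.attributes` spelling is the point):

```julia
using HDF5, Dates

HDF5.h5open(tempname() * ".h5", "w") do file
    g = HDF5.create_group(file, "ts")
    # 0.13 spelled these HDF5.attrs(g)[...]; the assignment syntax is unchanged.
    HDF5.attributes(g)["module"] = "InfrastructureSystems"
    HDF5.attributes(g)["initial_timestamp"] = Dates.datetime2epochms(DateTime(2020, 1, 1))
    HDF5.attributes(g)["resolution"] = Dates.Millisecond(Dates.Hour(1)).value
end
```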
@@ -180,17 +180,17 @@ function _read_time_series_attributes(
     ::Type{T},
 ) where {T <: Forecast}
     data = _read_time_series_attributes_common(storage, path, rows)
-    data["interval"] = Dates.Millisecond(HDF5.read(HDF5.attrs(path)["interval"]))
+    data["interval"] = Dates.Millisecond(HDF5.read(HDF5.attributes(path)["interval"]))
     return data
 end
 
 const _TYPE_DICT = Dict("CONSTANT" => CONSTANT, "POLYNOMIAL" => POLYNOMIAL, "PWL" => PWL)
 
 function _read_time_series_attributes_common(storage::Hdf5TimeSeriesStorage, path, rows)
     initial_timestamp =
-        Dates.epochms2datetime(HDF5.read(HDF5.attrs(path)["initial_timestamp"]),)
-    resolution = Dates.Millisecond(HDF5.read(HDF5.attrs(path)["resolution"]))
-    data_type = _TYPE_DICT[HDF5.read(HDF5.attrs(path)["data_type"])]
+        Dates.epochms2datetime(HDF5.read(HDF5.attributes(path)["initial_timestamp"]),)
+    resolution = Dates.Millisecond(HDF5.read(HDF5.attributes(path)["resolution"]))
+    data_type = _TYPE_DICT[HDF5.read(HDF5.attributes(path)["data_type"])]
     return Dict(
         "type" => _read_time_series_type(path),
         "initial_timestamp" => initial_timestamp,
@@ -202,8 +202,8 @@ function _read_time_series_attributes_common(storage::Hdf5TimeSeriesStorage, pat
 end
 
 function _read_time_series_type(path)
-    module_str = HDF5.read(HDF5.attrs(path)["module"])
-    type_str = HDF5.read(HDF5.attrs(path)["type"])
+    module_str = HDF5.read(HDF5.attributes(path)["module"])
+    type_str = HDF5.read(HDF5.attributes(path)["type"])
     return get_type_from_strings(module_str, type_str)
 end
 
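The matching read side of the two hunks above, as a sketch that round-trips one attribute (`HDF5.read(HDF5.attributes(obj)[name])` is the 0.14 spelling of the old `HDF5.attrs` calls):

```julia
using HDF5, Dates

path = tempname() * ".h5"
HDF5.h5open(path, "w") do file
    g = HDF5.create_group(file, "ts")
    HDF5.attributes(g)["initial_timestamp"] = Dates.datetime2epochms(DateTime(2020, 1, 1))
end
HDF5.h5open(path, "r") do file
    ms = HDF5.read(HDF5.attributes(file["ts"])["initial_timestamp"])
    @show Dates.epochms2datetime(ms)   # 2020-01-01T00:00:00
end
```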
@@ -237,8 +237,8 @@ function iterate_time_series(storage::Hdf5TimeSeriesStorage)
 
             data = uuid_group["data"][:]
             attributes = Dict()
-            for name in names(HDF5.attrs(uuid_group))
-                attributes[name] = HDF5.read(HDF5.attrs(uuid_group)[name])
+            for name in names(HDF5.attributes(uuid_group))
+                attributes[name] = HDF5.read(HDF5.attributes(uuid_group)[name])
             end
             for item in HDF5.read(uuid_group["components"])
                 component, name = deserialize_component_name(item)
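
Enumerating attribute names follows the same rename; a sketch (note `names` is the 0.14-era spelling used in this hunk; later HDF5.jl releases use `keys` instead):

```julia
using HDF5

HDF5.h5open(tempname() * ".h5", "w") do file
    g = HDF5.create_group(file, "ts")
    HDF5.attributes(g)["type"] = "SingleTimeSeries"
    HDF5.attributes(g)["resolution"] = 3_600_000
    attrs = Dict()
    for name in names(HDF5.attributes(g))   # keys(HDF5.attributes(g)) on newer releases
        attrs[name] = HDF5.read(HDF5.attributes(g)[name])
    end
    @show attrs
end
```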
@@ -276,7 +276,7 @@ function remove_time_series!(
         path = _get_time_series_path(root, uuid)
         if _remove_item!(path, "components", make_component_name(component_uuid, name))
             @debug "$path has no more references; delete it."
-            HDF5.o_delete(path)
+            HDF5.delete_object(path)
         end
     end
 end
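
Deleting an object through its handle, per the rename in this hunk (sketch):

```julia
using HDF5

HDF5.h5open(tempname() * ".h5", "w") do file
    HDF5.create_group(file, "stale")
    path = file["stale"]
    HDF5.delete_object(path)       # 0.13: HDF5.o_delete(path)
    @show haskey(file, "stale")    # false -- the group is unlinked
end
```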
@@ -602,29 +602,29 @@ end
 
 function _make_file(storage::Hdf5TimeSeriesStorage)
     HDF5.h5open(storage.file_path, "w") do file
-        HDF5.g_create(file, HDF5_TS_ROOT_PATH)
+        HDF5.create_group(file, HDF5_TS_ROOT_PATH)
     end
 end
 
 _get_root(storage::Hdf5TimeSeriesStorage, file) = file[HDF5_TS_ROOT_PATH]
 
-function _get_time_series_path(root::HDF5.HDF5Group, uuid::UUIDs.UUID)
+function _get_time_series_path(root::HDF5.Group, uuid::UUIDs.UUID)
     uuid_str = string(uuid)
-    if !HDF5.exists(root, uuid_str)
+    if !haskey(root, uuid_str)
         throw(ArgumentError("UUID $uuid_str does not exist"))
     end
 
     return root[uuid_str]
 end
 
-function _append_item!(path::HDF5.HDF5Group, name::AbstractString, value::AbstractString)
-    handle = HDF5.o_open(path, name)
+function _append_item!(path::HDF5.Group, name::AbstractString, value::AbstractString)
+    handle = HDF5.open_object(path, name)
     values = HDF5.read(handle)
     HDF5.close(handle)
     push!(values, value)
 
-    ret = HDF5.o_delete(path, name)
-    @assert_op ret == 0
+    ret = HDF5.delete_object(path, name)
+    @assert_op ret === nothing
 
     path[name] = values
     @debug "Appended $value to $name" values
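
The append helper's read/delete/recreate round trip, standalone (a sketch assuming a string-vector dataset named "components"; `delete_object` returns `nothing` rather than a C status code, which is why the assertion changes):

```julia
using HDF5

path = tempname() * ".h5"
HDF5.h5open(path, "w") do file
    file["components"] = ["a__ts1"]
end
HDF5.h5open(path, "r+") do file
    handle = HDF5.open_object(file, "components")   # 0.13: HDF5.o_open
    values = HDF5.read(handle)
    HDF5.close(handle)
    push!(values, "b__ts1")

    ret = HDF5.delete_object(file, "components")    # 0.13: HDF5.o_delete, returned 0
    @assert ret === nothing

    file["components"] = values    # recreate the dataset with the appended value
    @show HDF5.read(file["components"])   # ["a__ts1", "b__ts1"]
end
```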
@@ -634,8 +634,8 @@ end
 Removes value from the dataset called name.
 Returns true if the array is empty afterwards.
 """
-function _remove_item!(path::HDF5.HDF5Group, name::AbstractString, value::AbstractString)
-    handle = HDF5.o_open(path, name)
+function _remove_item!(path::HDF5.Group, name::AbstractString, value::AbstractString)
+    handle = HDF5.open_object(path, name)
     values = HDF5.read(handle)
     HDF5.close(handle)
 
@@ -645,8 +645,8 @@ function _remove_item!(path::HDF5.HDF5Group, name::AbstractString, value::Abstra
         throw(ArgumentError("$value wasn't stored in $name"))
     end
 
-    ret = HDF5.o_delete(path, name)
-    @assert_op ret == 0
+    ret = HDF5.delete_object(path, name)
+    @assert_op ret === nothing
 
     if isempty(values)
         is_empty = true
