diff --git a/.github/workflows/format-check.yml b/.github/workflows/format-check.yml index 99b07d566..cf2bed076 100644 --- a/.github/workflows/format-check.yml +++ b/.github/workflows/format-check.yml @@ -24,7 +24,7 @@ jobs: - uses: actions/checkout@v2 - name: Install JuliaFormatter and format run: | - julia -e 'include(".github/workflows/formatter/formatter_code.jl")' + julia -e 'include("scripts/formatter/formatter_code.jl")' - uses: reviewdog/action-suggester@v1 if: github.event_name == 'pull_request' with: diff --git a/.github/workflows/formatter/formatter_code.jl b/.github/workflows/formatter/formatter_code.jl deleted file mode 100644 index 0e6d199f4..000000000 --- a/.github/workflows/formatter/formatter_code.jl +++ /dev/null @@ -1,26 +0,0 @@ -using Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() - -using JuliaFormatter - -main_paths = ["./src", "./test"] -for main_path in main_paths - for folder in readdir(main_path) - @show folder_path = joinpath(main_path, folder) - if isfile(folder_path) - !occursin(".jl", folder_path) && continue - end - format(folder_path; - whitespace_ops_in_indices = true, - remove_extra_newlines = true, - verbose = true, - always_for_in = true, - whitespace_typedefs = true, - whitespace_in_kwargs = false, - format_docstrings = true, - - # always_use_return = true # removed since it has false positives. 
- ) - end -end diff --git a/.github/workflows/formatter/formatter_docs.jl b/.github/workflows/formatter/formatter_docs.jl deleted file mode 100644 index 78143a26f..000000000 --- a/.github/workflows/formatter/formatter_docs.jl +++ /dev/null @@ -1,25 +0,0 @@ -using Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() - -using JuliaFormatter - -main_paths = ["./docs"] -for main_path in main_paths - for folder in readdir(main_path) - @show folder_path = joinpath(main_path, folder) - if isfile(folder_path) - !occursin(".md", folder_path) && continue - end - format(folder_path; - format_markdown=true, - whitespace_ops_in_indices = true, - remove_extra_newlines = true, - verbose = true, - always_for_in = true, - whitespace_typedefs = true, - whitespace_in_kwargs = false, - # always_use_return = true # removed since it has false positives. - ) - end -end diff --git a/Project.toml b/Project.toml index eaeff4e36..3e5649d34 100644 --- a/Project.toml +++ b/Project.toml @@ -21,7 +21,6 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" StructTypes = "856f2bd8-1eba-4b0a-8007-ebc267875bd4" TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76" TerminalLoggers = "5d786b92-1e48-4d6f-9151-6b4477ca9bed" -Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" TimeSeries = "9e3dc215-6440-5c97-bce1-76c03772f85e" UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6" @@ -38,6 +37,13 @@ Mustache = "1" PrettyTables = "^1.3, 2" StructTypes = "^1.9" TerminalLoggers = "~0.1" -TimeSeries = "~0.22, 0.23" +TimeSeries = "0.23, 0.24" YAML = "~0.4" julia = "^1.6" +Dates = "1" +InteractiveUtils = "1" +Logging = "1" +Pkg = "1" +Random = "1" +TOML = "1" +UUIDs = "1" diff --git a/.github/workflows/formatter/Project.toml b/scripts/formatter/Project.toml similarity index 83% rename from .github/workflows/formatter/Project.toml rename to scripts/formatter/Project.toml index 08a9e6776..a33f747b5 100644 --- a/.github/workflows/formatter/Project.toml +++ b/scripts/formatter/Project.toml @@ -6,5 
+6,5 @@ JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" [compat] -JuliaFormatter = "v0.22.7" -julia = "^1.2" +JuliaFormatter = "1.0" +julia = "^1.7" diff --git a/scripts/formatter/formatter_code.jl b/scripts/formatter/formatter_code.jl new file mode 100644 index 000000000..080b7f258 --- /dev/null +++ b/scripts/formatter/formatter_code.jl @@ -0,0 +1,28 @@ +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() +Pkg.update() + +using JuliaFormatter + +main_paths = ["./src", "./test"] +for main_path in main_paths + for (root, dir, files) in walkdir(main_path) + for f in files + @show file_path = abspath(root, f) + !occursin(".jl", f) && continue + format(file_path; + whitespace_ops_in_indices = true, + remove_extra_newlines = true, + verbose = true, + always_for_in = true, + whitespace_typedefs = true, + conditional_to_if = true, + join_lines_based_on_source = true, + separate_kwargs_with_semicolon = true, + + # always_use_return = true. # Disabled since it throws a lot of false positives + ) + end + end +end diff --git a/src/InfrastructureSystems.jl b/src/InfrastructureSystems.jl index c6af32f8c..495b83784 100644 --- a/src/InfrastructureSystems.jl +++ b/src/InfrastructureSystems.jl @@ -43,6 +43,7 @@ Required interface functions for subtypes: Optional interface functions: - get_time_series_container() + - get_supplemental_attributes_container() Subtypes may contain time series. """ @@ -53,6 +54,25 @@ Base type for auxillary structs. These should not be stored in a system. """ abstract type DeviceParameter <: InfrastructureSystemsType end +""" +Base type for structs that store supplemental attributes + +Required interface functions for subtypes: + + - get_internal() + +Optional interface functions: + + - get_time_series_container() + - get_component_uuids() + - get_uuid() + +Subtypes may contain time series. 
Which requires + + - get_time_series_container() +""" +abstract type InfrastructureSystemsSupplementalAttribute <: InfrastructureSystemsType end + """ Return the internal time_series storage container or nothing, if the type doesn't store time series. @@ -63,7 +83,7 @@ function get_time_series_container(value::InfrastructureSystemsComponent) return nothing end -set_time_series_container!(value::InfrastructureSystemsComponent) = nothing +set_time_series_container!(value::InfrastructureSystemsComponent, _) = nothing get_name(value::InfrastructureSystemsComponent) = value.name @@ -92,7 +112,11 @@ include("forecasts.jl") include("static_time_series.jl") include("time_series_container.jl") include("time_series_parser.jl") +include("containers.jl") +include("supplemental_attribute.jl") +include("supplemental_attributes.jl") include("components.jl") +include("geographic_supplemental_attribute.jl") include("generated/includes.jl") include("single_time_series.jl") include("deterministic_single_time_series.jl") @@ -110,6 +134,7 @@ include("component.jl") include("results.jl") include("serialization.jl") include("system_data.jl") +include("time_series_interface.jl") include("validation.jl") include("utils/print.jl") include("utils/test.jl") diff --git a/src/component.jl b/src/component.jl index 93716e6f4..619586f7e 100644 --- a/src/component.jl +++ b/src/component.jl @@ -1,580 +1,3 @@ -function add_time_series!( - component::T, - time_series::TimeSeriesMetadata; - skip_if_present=false, -) where {T <: InfrastructureSystemsComponent} - component_name = get_name(component) - container = get_time_series_container(component) - if isnothing(container) - throw(ArgumentError("type $T does not support storing time series")) - end - - add_time_series!(container, time_series, skip_if_present=skip_if_present) - @debug "Added $time_series to $(typeof(component)) $(component_name) " * - "num_time_series=$(length(get_time_series_container(component).data))." 
_group = - LOG_GROUP_TIME_SERIES -end - -""" -Removes the metadata for a time_series. -If this returns true then the caller must also remove the actual time series data. -""" -function remove_time_series_metadata!( - component::InfrastructureSystemsComponent, - ::Type{T}, - name::AbstractString, -) where {T <: TimeSeriesMetadata} - container = get_time_series_container(component) - remove_time_series!(container, T, name) - @debug "Removed time_series from $(get_name(component)): $name." _group = - LOG_GROUP_TIME_SERIES - if T <: DeterministicMetadata && - has_time_series_internal(container, SingleTimeSeriesMetadata, name) - return false - elseif T <: SingleTimeSeriesMetadata && - has_time_series_internal(container, DeterministicMetadata, name) - return false - end - - return true -end - -function clear_time_series!(component::InfrastructureSystemsComponent) - container = get_time_series_container(component) - if !isnothing(container) - clear_time_series!(container) - @debug "Cleared time_series in $(get_name(component))." 
_group = - LOG_GROUP_TIME_SERIES - end -end - -function _get_columns(start_time, count, ts_metadata::ForecastMetadata) - offset = start_time - get_initial_timestamp(ts_metadata) - interval = time_period_conversion(get_interval(ts_metadata)) - window_count = get_count(ts_metadata) - if window_count > 1 - index = Int(offset / interval) + 1 - else - index = 1 - end - if count === nothing - count = window_count - index + 1 - end - - if index + count - 1 > get_count(ts_metadata) - throw( - ArgumentError( - "The requested start_time $start_time and count $count are invalid", - ), - ) - end - return UnitRange(index, index + count - 1) -end - -_get_columns(start_time, count, ts_metadata::StaticTimeSeriesMetadata) = UnitRange(1, 1) - -function _get_rows(start_time, len, ts_metadata::StaticTimeSeriesMetadata) - index = - Int( - (start_time - get_initial_timestamp(ts_metadata)) / get_resolution(ts_metadata), - ) + 1 - if len === nothing - len = length(ts_metadata) - index + 1 - end - if index + len - 1 > length(ts_metadata) - throw( - ArgumentError( - "The requested index=$index len=$len exceeds the range $(length(ts_metadata))", - ), - ) - end - - return UnitRange(index, index + len - 1) -end - -function _get_rows(start_time, len, ts_metadata::ForecastMetadata) - if len === nothing - len = get_horizon(ts_metadata) - end - - return UnitRange(1, len) -end - -function _check_start_time(start_time, ts_metadata::TimeSeriesMetadata) - if start_time === nothing - return get_initial_timestamp(ts_metadata) - end - - time_diff = start_time - get_initial_timestamp(ts_metadata) - if time_diff < Dates.Second(0) - throw( - ArgumentError( - "start_time=$start_time is earlier than $(get_initial_timestamp(ts_metadata))", - ), - ) - end - - if typeof(ts_metadata) <: ForecastMetadata - window_count = get_count(ts_metadata) - interval = get_interval(ts_metadata) - if window_count > 1 && - Dates.Millisecond(time_diff) % Dates.Millisecond(interval) != Dates.Second(0) - throw( - ArgumentError( - 
"start_time=$start_time is not on a multiple of interval=$interval", - ), - ) - end - end - - return start_time -end - -""" -Return a time series corresponding to the given parameters. - -# Arguments - - - `::Type{T}`: Concrete subtype of TimeSeriesData to return - - `component::InfrastructureSystemsComponent`: Component containing the time series - - `name::AbstractString`: name of time series - - `start_time::Union{Nothing, Dates.DateTime} = nothing`: If nothing, use the - `initial_timestamp` of the time series. If T is a subtype of Forecast then `start_time` - must be the first timstamp of a window. - - `len::Union{Nothing, Int} = nothing`: Length in the time dimension. If nothing, use the - entire length. - - `count::Union{Nothing, Int} = nothing`: Only applicable to subtypes of Forecast. Number - of forecast windows starting at `start_time` to return. Defaults to all available. -""" -function get_time_series( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - count::Union{Nothing, Int}=nothing, -) where {T <: TimeSeriesData} - if !has_time_series(component) - throw(ArgumentError("no forecasts are stored in $component")) - end - - metadata_type = time_series_data_to_metadata(T) - ts_metadata = get_time_series_metadata(metadata_type, component, name) - start_time = _check_start_time(start_time, ts_metadata) - rows = _get_rows(start_time, len, ts_metadata) - columns = _get_columns(start_time, count, ts_metadata) - storage = _get_time_series_storage(component) - return deserialize_time_series(T, storage, ts_metadata, rows, columns) -end - -function get_time_series_uuid( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString, -) where {T <: TimeSeriesData} - metadata_type = time_series_data_to_metadata(T) - metadata = get_time_series_metadata(metadata_type, component, name) - return get_time_series_uuid(metadata) -end 
- -function get_time_series_metadata( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString, -) where {T <: TimeSeriesMetadata} - return get_time_series_metadata(T, get_time_series_container(component), name) -end - -""" -Return a TimeSeries.TimeArray from storage for the given time series parameters. - -If the data are scaling factors then the stored scaling_factor_multiplier will be called on -the component and applied to the data unless ignore_scaling_factors is true. -""" -function get_time_series_array( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, -) where {T <: TimeSeriesData} - ts = get_time_series(T, component, name; start_time=start_time, len=len, count=1) - if start_time === nothing - start_time = get_initial_timestamp(ts) - end - - return get_time_series_array( - component, - ts, - start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, - ) -end - -""" -Return a TimeSeries.TimeArray for one forecast window from a cached Forecast instance. - -If the data are scaling factors then the stored scaling_factor_multiplier will be called on -the component and applied to the data unless ignore_scaling_factors is true. - -See also [`ForecastCache`](@ref). -""" -function get_time_series_array( - component::InfrastructureSystemsComponent, - forecast::Forecast, - start_time::Dates.DateTime; - len=nothing, - ignore_scaling_factors=false, -) - return _make_time_array(component, forecast, start_time, len, ignore_scaling_factors) -end - -""" -Return a TimeSeries.TimeArray from a cached StaticTimeSeries instance. - -If the data are scaling factors then the stored scaling_factor_multiplier will be called on -the component and applied to the data unless ignore_scaling_factors is true. - -See also [`StaticTimeSeriesCache`](@ref). 
-""" -function get_time_series_array( - component::InfrastructureSystemsComponent, - time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, -) - if start_time === nothing - start_time = get_initial_timestamp(time_series) - end - - if len === nothing - len = length(time_series) - end - - return _make_time_array(component, time_series, start_time, len, ignore_scaling_factors) -end - -""" -Return a vector of timestamps from storage for the given time series parameters. -""" -function get_time_series_timestamps( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, -) where {T <: TimeSeriesData} - return TimeSeries.timestamp( - get_time_series_array(T, component, name; start_time=start_time, len=len), - ) -end - -""" -Return a vector of timestamps from a cached Forecast instance. -""" -function get_time_series_timestamps( - component::InfrastructureSystemsComponent, - forecast::Forecast, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, -) - return TimeSeries.timestamp( - get_time_series_array(component, forecast, start_time; len=len), - ) -end - -""" -Return a vector of timestamps from a cached StaticTimeSeries instance. -""" -function get_time_series_timestamps( - component::InfrastructureSystemsComponent, - time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, -) - return TimeSeries.timestamp( - get_time_series_array(component, time_series, start_time; len=len), - ) -end - -""" -Return an Array of values from storage for the requested time series parameters. - -If the data size is small and this will be called many times, consider using the version -that accepts a cached TimeSeriesData instance. 
-""" -function get_time_series_values( - ::Type{T}, - component::InfrastructureSystemsComponent, - name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, -) where {T <: TimeSeriesData} - return TimeSeries.values( - get_time_series_array( - T, - component, - name; - start_time=start_time, - len=len, - ignore_scaling_factors=ignore_scaling_factors, - ), - ) -end - -""" -Return an Array of values for one forecast window from a cached Forecast instance. -""" -function get_time_series_values( - component::InfrastructureSystemsComponent, - forecast::Forecast, - start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, -) - return TimeSeries.values( - get_time_series_array( - component, - forecast, - start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, - ), - ) -end - -""" -Return an Array of values from a cached StaticTimeSeries instance for the requested time -series parameters. -""" -function get_time_series_values( - component::InfrastructureSystemsComponent, - time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, -) - return TimeSeries.values( - get_time_series_array( - component, - time_series, - start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, - ), - ) -end - -function _make_time_array(component, time_series, start_time, len, ignore_scaling_factors) - ta = make_time_array(time_series, start_time; len=len) - if ignore_scaling_factors - return ta - end - - multiplier = get_scaling_factor_multiplier(time_series) - if multiplier === nothing - return ta - end - - return ta .* multiplier(component) -end - -""" -Return true if the component has time series data. 
-""" -function has_time_series(component::InfrastructureSystemsComponent) - container = get_time_series_container(component) - return !isnothing(container) && !isempty(container) -end - -""" -Return true if the component has time series data of type T. -""" -function has_time_series( - component::InfrastructureSystemsComponent, - ::Type{T}, -) where {T <: TimeSeriesData} - container = get_time_series_container(component) - if container === nothing - return false - end - - for key in keys(container.data) - if isabstracttype(T) - if is_time_series_sub_type(key.time_series_type, T) - return true - end - elseif time_series_data_to_metadata(T) <: key.time_series_type - return true - end - end - - return false -end - -function has_time_series( - component::InfrastructureSystemsComponent, - type::Type{<:TimeSeriesMetadata}, - name::AbstractString, -) - container = get_time_series_container(component) - container === nothing && return false - return has_time_series_internal(container, type, name) -end - -""" -Efficiently add all time_series in one component to another by copying the underlying -references. - -# Arguments - - - `dst::InfrastructureSystemsComponent`: Destination component - - `src::InfrastructureSystemsComponent`: Source component - - `name_mapping::Dict = nothing`: Optionally map src names to different dst names. - If provided and src has a time_series with a name not present in name_mapping, that - time_series will not copied. If name_mapping is nothing then all time_series will be - copied with src's names. - - `scaling_factor_multiplier_mapping::Dict = nothing`: Optionally map src multipliers to - different dst multipliers. If provided and src has a time_series with a multiplier not - present in scaling_factor_multiplier_mapping, that time_series will not copied. If - scaling_factor_multiplier_mapping is nothing then all time_series will be copied with - src's multipliers. 
-""" -function copy_time_series!( - dst::InfrastructureSystemsComponent, - src::InfrastructureSystemsComponent; - name_mapping::Union{Nothing, Dict{Tuple{String, String}, String}}=nothing, - scaling_factor_multiplier_mapping::Union{Nothing, Dict{String, String}}=nothing, -) - storage = _get_time_series_storage(dst) - if isnothing(storage) - throw( - ArgumentError( - "Component does not have time series storage. " * - "It may not be attached to the system.", - ), - ) - end - - # There may be time series that share time series arrays as a result of - # transform_single_time_series! being called. - # Don't add these references to the storage more than once. - refs = Set{Tuple{String, Base.UUID}}() - - for ts_metadata in get_time_series_multiple(TimeSeriesMetadata, src) - name = get_name(ts_metadata) - new_name = name - if !isnothing(name_mapping) - new_name = get(name_mapping, (get_name(src), name), nothing) - if isnothing(new_name) - @debug "Skip copying ts_metadata" _group = LOG_GROUP_TIME_SERIES name - continue - end - @debug "Copy ts_metadata with" _group = LOG_GROUP_TIME_SERIES new_name - end - multiplier = get_scaling_factor_multiplier(ts_metadata) - new_multiplier = multiplier - if !isnothing(scaling_factor_multiplier_mapping) - new_multiplier = get(scaling_factor_multiplier_mapping, multiplier, nothing) - if isnothing(new_multiplier) - @debug "Skip copying ts_metadata" _group = LOG_GROUP_TIME_SERIES multiplier - continue - end - @debug "Copy ts_metadata with" _group = LOG_GROUP_TIME_SERIES new_multiplier - end - new_time_series = deepcopy(ts_metadata) - assign_new_uuid!(new_time_series) - set_name!(new_time_series, new_name) - set_scaling_factor_multiplier!(new_time_series, new_multiplier) - add_time_series!(dst, new_time_series) - ts_uuid = get_time_series_uuid(new_time_series) - ref = (new_name, ts_uuid) - if !in(ref, refs) - add_time_series_reference!(storage, get_uuid(dst), new_name, ts_uuid) - push!(refs, ref) - end - end -end - -function 
get_time_series_keys(component::InfrastructureSystemsComponent) - return keys(get_time_series_container(component).data) -end - -function list_time_series_metadata(component::InfrastructureSystemsComponent) - return collect(values(get_time_series_container(component).data)) -end - -function get_time_series_names( - ::Type{T}, - component::InfrastructureSystemsComponent, -) where {T <: TimeSeriesData} - return get_time_series_names( - time_series_data_to_metadata(T), - get_time_series_container(component), - ) -end - -function get_num_time_series(component::InfrastructureSystemsComponent) - container = get_time_series_container(component) - if isnothing(container) - return (0, 0) - end - - static_ts_count = 0 - forecast_count = 0 - for key in keys(container.data) - if key.time_series_type <: StaticTimeSeriesMetadata - static_ts_count += 1 - elseif key.time_series_type <: ForecastMetadata - forecast_count += 1 - else - error("panic") - end - end - - return (static_ts_count, forecast_count) -end - -function get_num_time_series_by_type(component::InfrastructureSystemsComponent) - counts = Dict{String, Int}() - container = get_time_series_container(component) - if isnothing(container) - return counts - end - - for metadata in values(container.data) - type = string(nameof(time_series_metadata_to_data(metadata))) - if haskey(counts, type) - counts[type] += 1 - else - counts[type] = 1 - end - end - - return counts -end - -function get_time_series( - component::InfrastructureSystemsComponent, - time_series::TimeSeriesData, -) - storage = _get_time_series_storage(component) - return get_time_series(storage, get_time_series_uuid(time_series)) -end - -function get_time_series_uuids(component::InfrastructureSystemsComponent) - container = get_time_series_container(component) - - return [ - (get_time_series_uuid(container.data[key]), key.name) for - key in get_time_series_keys(component) - ] -end - """ This function must be called when a component is removed from a system. 
""" @@ -585,6 +8,7 @@ function prepare_for_removal!(component::InfrastructureSystemsComponent) set_time_series_storage!(component, nothing) clear_time_series!(component) @debug "cleared all time series data from" _group = LOG_GROUP_SYSTEM get_name(component) + return end """ @@ -604,10 +28,10 @@ Call `collect` on the result to get an array. """ function get_time_series_multiple( component::InfrastructureSystemsComponent, - filter_func=nothing; - type=nothing, - start_time=nothing, - name=nothing, + filter_func = nothing; + type = nothing, + start_time = nothing, + name = nothing, ) container = get_time_series_container(component) storage = _get_time_series_storage(component) @@ -654,10 +78,10 @@ end function get_time_series_with_metadata_multiple( component::InfrastructureSystemsComponent, - filter_func=nothing; - type=nothing, - start_time=nothing, - name=nothing, + filter_func = nothing; + type = nothing, + start_time = nothing, + name = nothing, ) container = get_time_series_container(component) storage = _get_time_series_storage(component) @@ -712,17 +136,17 @@ function transform_single_time_series_internal!( params.forecast_params.interval, ) check_params_compatibility(params, _params) - new_metadata = DeterministicMetadata( - name=get_name(ts_metadata), - resolution=params.resolution, - initial_timestamp=params.forecast_params.initial_timestamp, - interval=params.forecast_params.interval, - count=params.forecast_params.count, - time_series_uuid=get_time_series_uuid(ts_metadata), - horizon=params.forecast_params.horizon, - time_series_type=DeterministicSingleTimeSeries, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=get_internal(ts_metadata), + new_metadata = DeterministicMetadata(; + name = get_name(ts_metadata), + resolution = params.resolution, + initial_timestamp = params.forecast_params.initial_timestamp, + interval = params.forecast_params.interval, + count = params.forecast_params.count, + time_series_uuid = 
get_time_series_uuid(ts_metadata), + horizon = params.forecast_params.horizon, + time_series_type = DeterministicSingleTimeSeries, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = get_internal(ts_metadata), ) push!(metadata_to_add, new_metadata) end @@ -802,6 +226,7 @@ function clear_time_series_storage!(component::InfrastructureSystemsComponent) end end end + return end function set_time_series_storage!( @@ -812,6 +237,7 @@ function set_time_series_storage!( if !isnothing(container) set_time_series_storage!(container, storage) end + return end function _get_time_series_storage(component::InfrastructureSystemsComponent) @@ -826,9 +252,9 @@ end function get_time_series_by_key( key::TimeSeriesKey, component::InfrastructureSystemsComponent; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - count::Union{Nothing, Int}=nothing, + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + count::Union{Nothing, Int} = nothing, ) container = get_time_series_container(component) ts_metadata = container.data[key] @@ -836,10 +262,10 @@ function get_time_series_by_key( return get_time_series( ts_type, component, - key.name, - start_time=start_time, - len=len, - count=count, + key.name; + start_time = start_time, + len = len, + count = count, ) end @@ -868,3 +294,61 @@ function assign_new_uuid!(component::InfrastructureSystemsComponent) set_uuid!(get_internal(component), new_uuid) return end + +""" +Attach an attribute to a component. 
+""" +function attach_supplemental_attribute!( + component::InfrastructureSystemsComponent, + attribute::T, +) where {T <: InfrastructureSystemsSupplementalAttribute} + attribute_container = get_supplemental_attributes_container(component) + + if !haskey(attribute_container, T) + attribute_container[T] = Set{T}() + end + push!(attribute_container[T], attribute) + @debug "SupplementalAttribute type $T with UUID $(get_uuid(attribute)) stored in component $(summary(component))" _group = + LOG_GROUP_SYSTEM + return +end + +""" +Return true if the component has attributes. +""" +function has_supplemental_attributes(component::InfrastructureSystemsComponent) + container = get_supplemental_attributes_container(component) + return !isempty(container) +end + +function clear_supplemental_attributes!(component::InfrastructureSystemsComponent) + container = get_supplemental_attributes_container(component) + for attribute_set in values(container) + for attribute in attribute_set + detach_component!(attribute, component) + detach_supplemental_attribute!(component, attribute) + end + end + empty!(container) + @debug "Cleared attributes in $(summary(component))." 
+ return +end + +function detach_supplemental_attribute!( + component::InfrastructureSystemsComponent, + attribute::T, +) where {T <: InfrastructureSystemsSupplementalAttribute} + container = get_supplemental_attributes_container(component) + if !haskey(container, T) + throw( + ArgumentError( + "SupplementalAttribute of type $T is not stored in component $(summary(component))", + ), + ) + end + delete!(container[T], attribute) + if isempty(container[T]) + pop!(container, T) + end + return +end diff --git a/src/components.jl b/src/components.jl index ff122da96..508c49225 100644 --- a/src/components.jl +++ b/src/components.jl @@ -1,13 +1,18 @@ const ComponentsByType = Dict{DataType, Dict{String, <:InfrastructureSystemsComponent}} -struct Components +struct Components <: InfrastructureSystemsContainer data::ComponentsByType time_series_storage::TimeSeriesStorage validation_descriptors::Vector end -function Components(time_series_storage::TimeSeriesStorage, validation_descriptors=nothing) +get_display_string(::Components) = "components" + +function Components( + time_series_storage::TimeSeriesStorage, + validation_descriptors = nothing, +) if isnothing(validation_descriptors) validation_descriptors = Vector() end @@ -15,16 +20,11 @@ function Components(time_series_storage::TimeSeriesStorage, validation_descripto return Components(ComponentsByType(), time_series_storage, validation_descriptors) end -function serialize(components::Components) - # time_series_storage and validation_descriptors are serialized elsewhere. 
- return [serialize(x) for y in values(components.data) for x in values(y)] -end - function _add_component!( components::Components, component::T; - skip_validation=false, - allow_existing_time_series=false, + skip_validation = false, + allow_existing_time_series = false, ) where {T <: InfrastructureSystemsComponent} component_name = get_name(component) if !isconcretetype(T) @@ -91,6 +91,7 @@ function check_component(components::Components, comp::InfrastructureSystemsComp if !validate_struct(comp) throw(InvalidValue("$(summary(comp)) is invalid")) end + return end """ @@ -132,13 +133,13 @@ Throws ArgumentError if the component is not stored. function remove_component!( components::Components, component::T; - remove_time_series=true, + remove_time_series = true, ) where {T <: InfrastructureSystemsComponent} return _remove_component!( T, components, - get_name(component), - remove_time_series=remove_time_series, + get_name(component); + remove_time_series = remove_time_series, ) end @@ -151,16 +152,16 @@ function remove_component!( ::Type{T}, components::Components, name::AbstractString; - remove_time_series=true, + remove_time_series = true, ) where {T <: InfrastructureSystemsComponent} - return _remove_component!(T, components, name, remove_time_series=remove_time_series) + return _remove_component!(T, components, name; remove_time_series = remove_time_series) end function _remove_component!( ::Type{T}, components::Components, name::AbstractString; - remove_time_series=true, + remove_time_series = true, ) where {T <: InfrastructureSystemsComponent} if !haskey(components.data, T) throw(ArgumentError("component $T is not stored")) @@ -268,7 +269,7 @@ Call collect on the result if an array is desired. # Arguments - `T`: component type - - `components::Components`: Components of the sytem + - `components::Components`: Components of the system - `filter_func::Union{Nothing, Function} = nothing`: Optional function that accepts a component of type T and returns a Bool. 
Apply this function to each component and only return components where the result is true. @@ -278,7 +279,7 @@ See also: [`iterate_components`](@ref) function get_components( ::Type{T}, components::Components, - filter_func::Union{Nothing, Function}=nothing, + filter_func::Union{Nothing, Function} = nothing, ) where {T <: InfrastructureSystemsComponent} if isconcretetype(T) _components = get(components.data, T, nothing) @@ -321,39 +322,15 @@ end See also: [`get_components`](@ref) """ function iterate_components(components::Components) - Channel() do channel - for comp_dict in values(components.data) - for component in values(comp_dict) - put!(channel, component) - end - end - end + iterate_container(components) end function iterate_components_with_time_series(components::Components) - Channel() do channel - for comp_dict in values(components.data) - for component in values(comp_dict) - if has_time_series(component) - put!(channel, component) - end - end - end - end + iterate_container_with_time_series(components) end function get_num_components(components::Components) - count = 0 - for components in values(components.data) - count += length(components) - end - return count -end - -function clear_time_series!(components::Components) - for component in iterate_components_with_time_series(components) - clear_time_series!(component) - end + return get_num_members(components) end function is_attached( @@ -388,16 +365,17 @@ function set_name!( set_name_internal!(component, name) components.data[T][name] = component @debug "Changed the name of component $(summary(component))" _group = LOG_GROUP_SYSTEM + return end -function compare_values(x::Components, y::Components; compare_uuids=false) +function compare_values(x::Components, y::Components; compare_uuids = false) match = true for name in fieldnames(Components) # This gets validated in SystemData. 
name == :time_series_storage && continue val_x = getfield(x, name) val_y = getfield(y, name) - if !compare_values(val_x, val_y, compare_uuids=compare_uuids) + if !compare_values(val_x, val_y; compare_uuids = compare_uuids) @error "Components field = $name does not match" val_x val_y match = false end diff --git a/src/containers.jl b/src/containers.jl new file mode 100644 index 000000000..afe3ad1f9 --- /dev/null +++ b/src/containers.jl @@ -0,0 +1,46 @@ +abstract type InfrastructureSystemsContainer end + +function serialize(container::InfrastructureSystemsContainer) + # time_series_storage and validation_descriptors are serialized elsewhere. + return [serialize(x) for y in values(container.data) for x in values(y)] +end + +""" +Iterates over all data in the container. +""" +function iterate_container(container::InfrastructureSystemsContainer) + Channel() do channel + for m_dict in values(container.data) + for member in values(m_dict) + put!(channel, member) + end + end + end +end + +function iterate_container_with_time_series(container::InfrastructureSystemsContainer) + Channel() do channel + for m_dict in values(container.data) + for member in values(m_dict) + if has_time_series(member) + put!(channel, member) + end + end + end + end +end + +function get_num_members(container::InfrastructureSystemsContainer) + count = 0 + for members in values(container.data) + count += length(members) + end + return count +end + +function clear_time_series!(container::InfrastructureSystemsContainer) + for member in iterate_container_with_time_series(container) + clear_time_series!(member) + end + return +end diff --git a/src/deterministic.jl b/src/deterministic.jl index fa3ca5991..11c71425d 100644 --- a/src/deterministic.jl +++ b/src/deterministic.jl @@ -42,9 +42,9 @@ function Deterministic(; name, data, resolution, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + 
normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) return Deterministic(name, data, resolution, scaling_factor_multiplier, internal) @@ -54,15 +54,15 @@ function Deterministic( name::AbstractString, data::AbstractDict, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Deterministic( - name=name, - data=data, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - internal=InfrastructureSystemsInternal(), + return Deterministic(; + name = name, + data = data, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + internal = InfrastructureSystemsInternal(), ) end @@ -84,8 +84,8 @@ Construct Deterministic from a Dict of TimeArrays. 
function Deterministic( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data_type = eltype(TimeSeries.values(first(values(input_data)))) data = SortedDict{Dates.DateTime, Vector{data_type}}() @@ -99,12 +99,12 @@ function Deterministic( data[k] = TimeSeries.values(v) end - return Deterministic( - name=name, - data=data, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Deterministic(; + name = name, + data = data, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -128,8 +128,8 @@ function Deterministic( filename::AbstractString, component::InfrastructureSystemsComponent, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) component_name = get_name(component) raw_data = read_time_series(Deterministic, filename, component_name) @@ -137,8 +137,8 @@ function Deterministic( name, raw_data, resolution; - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -149,25 +149,25 @@ function Deterministic( name::AbstractString, series_data::RawTimeSeries, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, 
Function} = nothing, ) - return Deterministic( - name=name, - data=series_data.data, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Deterministic(; + name = name, + data = series_data.data, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Deterministic(ts_metadata::DeterministicMetadata, data::SortedDict) - return Deterministic( - name=get_name(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Deterministic(; + name = get_name(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end @@ -176,8 +176,8 @@ function Deterministic(info::TimeSeriesParsedInfo) info.name, info.data, info.resolution; - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -278,16 +278,16 @@ get_initial_times(forecast::Deterministic) = get_initial_times_common(forecast) get_initial_timestamp(forecast::Deterministic) = get_initial_timestamp_common(forecast) get_interval(forecast::Deterministic) = get_interval_common(forecast) iterate_windows(forecast::Deterministic) = iterate_windows_common(forecast) -get_window(f::Deterministic, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Deterministic, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) function make_time_array(forecast::Deterministic) # 
Artificial limitation to reduce scope. @assert_op get_count(forecast) == 1 timestamps = range( get_initial_timestamp(forecast); - step=get_resolution(forecast), - length=get_horizon(forecast), + step = get_resolution(forecast), + length = get_horizon(forecast), ) data = first(values(get_data(forecast))) return TimeSeries.TimeArray(timestamps, data) diff --git a/src/deterministic_single_time_series.jl b/src/deterministic_single_time_series.jl index cc19c5888..c5253c421 100644 --- a/src/deterministic_single_time_series.jl +++ b/src/deterministic_single_time_series.jl @@ -104,7 +104,7 @@ get_resolution(val::DeterministicSingleTimeSeries) = get_resolution(val.single_t function get_window( forecast::DeterministicSingleTimeSeries, initial_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) tdiff = Dates.Millisecond(initial_time - forecast.initial_timestamp) interval_ms = Dates.Millisecond(forecast.interval) @@ -134,7 +134,7 @@ function iterate_windows(forecast::DeterministicSingleTimeSeries) end initial_times = - range(forecast.initial_timestamp; step=forecast.interval, length=forecast.count) + range(forecast.initial_timestamp; step = forecast.interval, length = forecast.count) return (get_window(forecast, it) for it in initial_times) end diff --git a/src/forecasts.jl b/src/forecasts.jl index 9b7fa0a7a..892765cbd 100644 --- a/src/forecasts.jl +++ b/src/forecasts.jl @@ -61,8 +61,8 @@ end """ Return the forecast window corresponsing to interval index. """ -function get_window(forecast::Forecast, index::Int; len=nothing) - return get_window(forecast, index_to_initial_time(forecast, index); len=len) +function get_window(forecast::Forecast, index::Int; len = nothing) + return get_window(forecast, index_to_initial_time(forecast, index); len = len) end function iterate_windows_common(forecast) @@ -82,17 +82,17 @@ Return a TimeSeries.TimeArray for one forecast window. 
function make_time_array( forecast::Forecast, start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) - return get_window(forecast, start_time; len=len) + return get_window(forecast, start_time; len = len) end -function make_timestamps(forecast::Forecast, initial_time::Dates.DateTime, len=nothing) +function make_timestamps(forecast::Forecast, initial_time::Dates.DateTime, len = nothing) if len === nothing len = get_horizon(forecast) end - return range(initial_time; length=len, step=get_resolution(forecast)) + return range(initial_time; length = len, step = get_resolution(forecast)) end # This method requires that the forecast type implement a `get_data` method like @@ -118,7 +118,7 @@ end function get_window_common( forecast, initial_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) horizon = get_horizon(forecast) if len === nothing diff --git a/src/geographic_supplemental_attribute.jl b/src/geographic_supplemental_attribute.jl new file mode 100644 index 000000000..92bd45f55 --- /dev/null +++ b/src/geographic_supplemental_attribute.jl @@ -0,0 +1,21 @@ +""" +Attribute to store Geographic Information about the system components +""" +struct GeographicInfo <: InfrastructureSystemsSupplementalAttribute + geo_json::Dict{String, Any} + component_uuids::Set{UUIDs.UUID} + internal::InfrastructureSystemsInternal +end + +function GeographicInfo(; + geo_json::Dict{String, Any} = Dict{String, Any}(), + component_uuids::Set{UUIDs.UUID} = Set{UUIDs.UUID}(), +) + return GeographicInfo(geo_json, component_uuids, InfrastructureSystemsInternal()) +end + +get_geo_json(geo::GeographicInfo) = geo.geo_json +get_internal(geo::GeographicInfo) = geo.internal +get_uuid(geo::GeographicInfo) = get_uuid(get_internal(geo)) +get_time_series_container(::GeographicInfo) = nothing +get_component_uuids(geo::GeographicInfo) = geo.component_uuids diff --git a/src/hdf5_time_series_storage.jl 
b/src/hdf5_time_series_storage.jl index 5aecbc404..13bb48c87 100644 --- a/src/hdf5_time_series_storage.jl +++ b/src/hdf5_time_series_storage.jl @@ -41,10 +41,10 @@ Constructs Hdf5TimeSeriesStorage. """ function Hdf5TimeSeriesStorage( create_file::Bool; - filename=nothing, - directory=nothing, - read_only=false, - compression=CompressionSettings(), + filename = nothing, + directory = nothing, + read_only = false, + compression = CompressionSettings(), ) if create_file if isnothing(filename) @@ -72,8 +72,8 @@ Constructs Hdf5TimeSeriesStorage from an existing file. function from_file( ::Type{Hdf5TimeSeriesStorage}, filename::AbstractString; - read_only=false, - directory=nothing, + read_only = false, + directory = nothing, ) if !isfile(filename) error("time series storage $filename does not exist") @@ -88,7 +88,7 @@ function from_file( copy_h5_file(filename, file_path) end - storage = Hdf5TimeSeriesStorage(false; filename=file_path, read_only=read_only) + storage = Hdf5TimeSeriesStorage(false; filename = file_path, read_only = read_only) if !read_only version = read_data_format_version(storage) if version == "1.0.0" @@ -117,7 +117,7 @@ undergoing a deepcopy. 
- `storage::Hdf5TimeSeriesStorage`: storage instance - `directory::String`: If nothing, use tempdir """ -function copy_to_new_file!(storage::Hdf5TimeSeriesStorage, directory=nothing) +function copy_to_new_file!(storage::Hdf5TimeSeriesStorage, directory = nothing) if directory === nothing directory = tempdir() end @@ -181,12 +181,12 @@ function serialize_time_series!( settings = storage.compression if settings.enabled if settings.type == CompressionTypes.BLOSC - group["data", blosc=settings.level] = data + group["data", blosc = settings.level] = data elseif settings.type == CompressionTypes.DEFLATE if settings.shuffle - group["data", shuffle=(), deflate=settings.level] = data + group["data", shuffle = (), deflate = settings.level] = data else - group["data", deflate=settings.level] = data + group["data", deflate = settings.level] = data end else error("not implemented for type=$(settings.type)") @@ -402,8 +402,8 @@ function deserialize_time_series( TimeSeries.TimeArray( range( attributes["start_time"]; - length=length(rows), - step=attributes["resolution"], + length = length(rows), + step = attributes["resolution"], ), data, ), @@ -459,7 +459,8 @@ function get_hdf_array( data[start_time] = dataset[rows, columns.start] else data_read = dataset[rows, columns] - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -481,7 +482,8 @@ function get_hdf_array( data[start_time] = retransform_hdf_array(dataset[rows, columns.start, :], type) else data_read = retransform_hdf_array(dataset[rows, columns, :], type) - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -503,7 +505,8 @@ function get_hdf_array( data[start_time] = 
retransform_hdf_array(dataset[rows, columns.start, :, :], type) else data_read = retransform_hdf_array(dataset[rows, columns, :, :], type) - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -622,7 +625,7 @@ function deserialize_time_series( [3, 2, 1], ) for (i, it) in enumerate( - range(start_time; length=length(columns), step=attributes["interval"]), + range(start_time; length = length(columns), step = attributes["interval"]), ) data[it] = @view data_read[i, 1:length(rows), 1:total_percentiles] end @@ -661,7 +664,7 @@ function deserialize_time_series( data_read = PermutedDimsArray(path["data"][1:total_scenarios, rows, columns], [3, 2, 1]) for (i, it) in enumerate( - range(start_time; length=length(columns), step=attributes["interval"]), + range(start_time; length = length(columns), step = attributes["interval"]), ) data[it] = @view data_read[i, 1:length(rows), 1:total_scenarios] end @@ -735,11 +738,11 @@ end function _deserialize_compression_settings!(storage::Hdf5TimeSeriesStorage) HDF5.h5open(storage.file_path, "r+") do file root = _get_root(storage, file) - storage.compression = CompressionSettings( - enabled=HDF5.read(HDF5.attributes(root)["compression_enabled"]), - type=CompressionTypes(HDF5.read(HDF5.attributes(root)["compression_type"])), - level=HDF5.read(HDF5.attributes(root)["compression_level"]), - shuffle=HDF5.read(HDF5.attributes(root)["compression_shuffle"]), + storage.compression = CompressionSettings(; + enabled = HDF5.read(HDF5.attributes(root)["compression_enabled"]), + type = CompressionTypes(HDF5.read(HDF5.attributes(root)["compression_type"])), + level = HDF5.read(HDF5.attributes(root)["compression_level"]), + shuffle = HDF5.read(HDF5.attributes(root)["compression_shuffle"]), ) return end @@ -768,10 +771,10 @@ is_read_only(storage::Hdf5TimeSeriesStorage) = 
storage.read_only function compare_values( x::Hdf5TimeSeriesStorage, y::Hdf5TimeSeriesStorage; - compare_uuids=false, + compare_uuids = false, ) - item_x = sort!(collect(iterate_time_series(x)), by=z -> z[1]) - item_y = sort!(collect(iterate_time_series(y)), by=z -> z[1]) + item_x = sort!(collect(iterate_time_series(x)); by = z -> z[1]) + item_y = sort!(collect(iterate_time_series(y)); by = z -> z[1]) if length(item_x) != length(item_y) @error "lengths don't match" length(item_x) length(item_y) return false diff --git a/src/in_memory_time_series_storage.jl b/src/in_memory_time_series_storage.jl index 9fd90fa6e..3693afc61 100644 --- a/src/in_memory_time_series_storage.jl +++ b/src/in_memory_time_series_storage.jl @@ -42,7 +42,7 @@ Base.isempty(storage::InMemoryTimeSeriesStorage) = isempty(storage.data) check_read_only(storage::InMemoryTimeSeriesStorage) = nothing get_compression_settings(storage::InMemoryTimeSeriesStorage) = - CompressionSettings(enabled=false) + CompressionSettings(; enabled = false) is_read_only(storage::InMemoryTimeSeriesStorage) = false @@ -164,7 +164,7 @@ function deserialize_time_series( interval = get_interval(ts) start_time = initial_timestamp + interval * (columns.start - 1) data = SortedDict{Dates.DateTime, eltype(typeof(full_data)).parameters[2]}() - for initial_time in range(start_time; step=interval, length=length(columns)) + for initial_time in range(start_time; step = interval, length = length(columns)) if rows.start == 1 it = initial_time else @@ -218,7 +218,7 @@ function replace_component_uuid!( end function convert_to_hdf5(storage::InMemoryTimeSeriesStorage, filename::AbstractString) create_file = true - hdf5_storage = Hdf5TimeSeriesStorage(create_file; filename=filename) + hdf5_storage = Hdf5TimeSeriesStorage(create_file; filename = filename) for record in values(storage.data) for pair in record.component_names serialize_time_series!(hdf5_storage, pair[1], pair[2], record.ts) @@ -229,7 +229,7 @@ end function compare_values( 
x::InMemoryTimeSeriesStorage, y::InMemoryTimeSeriesStorage; - compare_uuids=false, + compare_uuids = false, ) keys_x = sort!(collect(keys(x.data))) keys_y = sort!(collect(keys(y.data))) diff --git a/src/internal.jl b/src/internal.jl index bad946ca3..0ff78f177 100644 --- a/src/internal.jl +++ b/src/internal.jl @@ -32,7 +32,7 @@ end """ Creates InfrastructureSystemsInternal with a new UUID. """ -InfrastructureSystemsInternal(; uuid=make_uuid(), units_info=nothing, ext=nothing) = +InfrastructureSystemsInternal(; uuid = make_uuid(), units_info = nothing, ext = nothing) = InfrastructureSystemsInternal(uuid, units_info, ext) """ @@ -111,7 +111,7 @@ end function compare_values( x::InfrastructureSystemsInternal, y::InfrastructureSystemsInternal; - compare_uuids=false, + compare_uuids = false, ) match = true for name in fieldnames(InfrastructureSystemsInternal) @@ -127,14 +127,14 @@ function compare_values( if val2 isa Dict && isempty(val2) val2 = nothing end - if !compare_values(val1, val2, compare_uuids=compare_uuids) + if !compare_values(val1, val2; compare_uuids = compare_uuids) @error "ext does not match" val1 val2 match = false end elseif !compare_values( getfield(x, name), - getfield(y, name), - compare_uuids=compare_uuids, + getfield(y, name); + compare_uuids = compare_uuids, ) @error "InfrastructureSystemsInternal field=$name does not match" match = false diff --git a/src/probabilistic.jl b/src/probabilistic.jl index 11484a949..eaf778bd0 100644 --- a/src/probabilistic.jl +++ b/src/probabilistic.jl @@ -47,9 +47,9 @@ function Probabilistic(; resolution, percentiles, data, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) quantile_count = size(first(values(data)))[2] @@ -91,17 +91,17 @@ function Probabilistic( 
input_data::AbstractDict, percentiles::Vector, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Probabilistic( - name=name, - data=input_data, - percentiles=percentiles, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return Probabilistic(; + name = name, + data = input_data, + percentiles = percentiles, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -125,8 +125,8 @@ function Probabilistic( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}, percentiles::Vector{Float64}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data = SortedDict{Dates.DateTime, Matrix{Float64}}() resolution = @@ -136,13 +136,13 @@ function Probabilistic( data[k] = TimeSeries.values(v) end - return Probabilistic( - name=name, - data=data, - percentiles=percentiles, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Probabilistic(; + name = name, + data = data, + percentiles = percentiles, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -154,38 +154,38 @@ function Probabilistic( series_data::RawTimeSeries, percentiles::Vector, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - 
scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Probabilistic( - name=name, - data=series_data.data, - percentiles=percentiles, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Probabilistic(; + name = name, + data = series_data.data, + percentiles = percentiles, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Probabilistic(ts_metadata::ProbabilisticMetadata, data::SortedDict) - return Probabilistic( - name=get_name(ts_metadata), - percentiles=get_percentiles(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Probabilistic(; + name = get_name(ts_metadata), + percentiles = get_percentiles(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end function Probabilistic(info::TimeSeriesParsedInfo) - return Probabilistic( - name=info.name, - data=info.data, - percentiles=info.percentiles, - resolution=info.resolution, - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + return Probabilistic(; + name = info.name, + data = info.data, + percentiles = info.percentiles, + resolution = info.resolution, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -290,6 +290,6 @@ get_count(forecast::Probabilistic) = get_count_common(forecast) get_initial_times(forecast::Probabilistic) = 
get_initial_times_common(forecast) get_initial_timestamp(forecast::Probabilistic) = get_initial_timestamp_common(forecast) get_interval(forecast::Probabilistic) = get_interval_common(forecast) -get_window(f::Probabilistic, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Probabilistic, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) iterate_windows(forecast::Probabilistic) = iterate_windows_common(forecast) diff --git a/src/scenarios.jl b/src/scenarios.jl index b1e59369e..aae6d922e 100644 --- a/src/scenarios.jl +++ b/src/scenarios.jl @@ -47,9 +47,9 @@ function Scenarios(; data, scenario_count, resolution, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) return Scenarios( @@ -80,19 +80,19 @@ function Scenarios( name::AbstractString, input_data::AbstractDict, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) scenario_count = size(first(values(input_data)))[2] - return Scenarios( - name=name, - data=input_data, - scenario_count=scenario_count, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return Scenarios(; + name = name, + data = input_data, + scenario_count = scenario_count, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -114,8 +114,8 @@ Construct 
Scenarios from a Dict of TimeArrays. function Scenarios( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data = SortedDict{Dates.DateTime, Matrix{Float64}}() resolution = @@ -129,19 +129,19 @@ function Scenarios( name, data, resolution; - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Scenarios(ts_metadata::ScenariosMetadata, data::SortedDict) - return Scenarios( - name=get_name(ts_metadata), - scenario_count=get_scenario_count(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Scenarios(; + name = get_name(ts_metadata), + scenario_count = get_scenario_count(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end @@ -150,8 +150,8 @@ function Scenarios(info::TimeSeriesParsedInfo) info.name, info.data, info.resolution; - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -242,6 +242,6 @@ get_count(forecast::Scenarios) = get_count_common(forecast) get_initial_times(forecast::Scenarios) = get_initial_times_common(forecast) get_initial_timestamp(forecast::Scenarios) = get_initial_timestamp_common(forecast) 
get_interval(forecast::Scenarios) = get_interval_common(forecast) -get_window(f::Scenarios, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Scenarios, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) iterate_windows(forecast::Scenarios) = iterate_windows_common(forecast) diff --git a/src/serialization.jl b/src/serialization.jl index 2769b5ca1..e2fb05792 100644 --- a/src/serialization.jl +++ b/src/serialization.jl @@ -12,14 +12,14 @@ Serializes a InfrastructureSystemsType to a JSON file. function to_json( obj::T, filename::AbstractString; - force=false, - pretty=false, + force = false, + pretty = false, ) where {T <: InfrastructureSystemsType} if !force && isfile(filename) error("$file already exists. Set force=true to overwrite.") end result = open(filename, "w") do io - return to_json(io, obj, pretty=pretty) + return to_json(io, obj; pretty = pretty) end @info "Serialized $T to $filename" @@ -29,10 +29,10 @@ end """ Serializes a InfrastructureSystemsType to a JSON string. 
""" -function to_json(obj::T; pretty=false, indent=2) where {T <: InfrastructureSystemsType} +function to_json(obj::T; pretty = false, indent = 2) where {T <: InfrastructureSystemsType} if pretty io = IOBuffer() - JSON3.pretty(io, serialize(obj), JSON3.AlignmentContext(indent=indent)) + JSON3.pretty(io, serialize(obj), JSON3.AlignmentContext(; indent = indent)) return take!(io) else return JSON3.write(serialize(obj)) @@ -42,12 +42,12 @@ end function to_json( io::IO, obj::T; - pretty=false, - indent=2, + pretty = false, + indent = 2, ) where {T <: InfrastructureSystemsType} data = serialize(obj) if pretty - res = JSON3.pretty(io, data, JSON3.AlignmentContext(indent=indent)) + res = JSON3.pretty(io, data, JSON3.AlignmentContext(; indent = indent)) else res = JSON3.write(io, data) end @@ -267,7 +267,7 @@ deserialize(::Type{Vector{Symbol}}, data::Vector) = Symbol.(data) function serialize_julia_info() data = Dict{String, Any}("julia_version" => string(VERSION)) io = IOBuffer() - Pkg.status(io=io, mode=Pkg.PKGMODE_MANIFEST) + Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST) data["package_info"] = String(take!(io)) return data end diff --git a/src/single_time_series.jl b/src/single_time_series.jl index 4e04072a2..b1bc499a7 100644 --- a/src/single_time_series.jl +++ b/src/single_time_series.jl @@ -30,9 +30,9 @@ end function SingleTimeSeries(; name, data, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(data, normalization_factor) return SingleTimeSeries( @@ -73,24 +73,24 @@ Construct SingleTimeSeries from a TimeArray or DataFrame. 
function SingleTimeSeries( name::AbstractString, data::Union{TimeSeries.TimeArray, DataFrames.DataFrame}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, - timestamp=:timestamp, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, + timestamp = :timestamp, ) if data isa DataFrames.DataFrame - ta = TimeSeries.TimeArray(data; timestamp=timestamp) + ta = TimeSeries.TimeArray(data; timestamp = timestamp) elseif data isa TimeSeries.TimeArray ta = data else error("fatal: $(typeof(data))") end - return SingleTimeSeries( - name=name, - data=ta, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return SingleTimeSeries(; + name = name, + data = ta, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -115,17 +115,17 @@ function SingleTimeSeries( filename::AbstractString, component::InfrastructureSystemsComponent, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) component_name = get_name(component) raw = read_time_series(SingleTimeSeries, filename, component_name) ta = make_time_array(raw, component_name, resolution) - return SingleTimeSeries( - name=name, - data=ta, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return SingleTimeSeries(; + name = name, + data = ta, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -143,7 +143,7 @@ function SingleTimeSeries( initial_time:resolution:(initial_time + resolution * (time_steps - 1)), 
ones(time_steps), ) - return SingleTimeSeries(; name=name, data=data) + return SingleTimeSeries(; name = name, data = data) end function SingleTimeSeries(time_series::Vector{SingleTimeSeries}) @@ -153,10 +153,10 @@ function SingleTimeSeries(time_series::Vector{SingleTimeSeries}) data = collect(Iterators.flatten((TimeSeries.values(get_data(x)) for x in time_series))) ta = TimeSeries.TimeArray(timestamps, data) - time_series = SingleTimeSeries( - name=get_name(time_series[1]), - data=ta, - scaling_factor_multiplier=time_series[1].scaling_factor_multiplier, + time_series = SingleTimeSeries(; + name = get_name(time_series[1]), + data = ta, + scaling_factor_multiplier = time_series[1].scaling_factor_multiplier, ) @debug "concatenated time_series" LOG_GROUP_TIME_SERIES time_series return time_series @@ -185,11 +185,11 @@ end function SingleTimeSeries(info::TimeSeriesParsedInfo) data = make_time_array(info) - return SingleTimeSeries( - name=info.name, - data=data, - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + return SingleTimeSeries(; + name = info.name, + data = data, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -257,7 +257,7 @@ Base.lastindex(time_series::SingleTimeSeries, d) = lastindex(get_data(time_serie Base.eachindex(time_series::SingleTimeSeries) = eachindex(get_data(time_series)) -Base.iterate(time_series::SingleTimeSeries, n=1) = iterate(get_data(time_series), n) +Base.iterate(time_series::SingleTimeSeries, n = 1) = iterate(get_data(time_series), n) """ Refer to TimeSeries.when(). Underlying data is copied. @@ -270,9 +270,9 @@ end Return a time_series truncated starting with timestamp. 
""" function from(time_series::SingleTimeSeries, timestamp) - return SingleTimeSeries( - name=get_name(time_series), - data=TimeSeries.from(get_data(time_series), timestamp), + return SingleTimeSeries(; + name = get_name(time_series), + data = TimeSeries.from(get_data(time_series), timestamp), ) end @@ -280,9 +280,9 @@ end Return a time_series truncated after timestamp. """ function to(time_series::SingleTimeSeries, timestamp) - return SingleTimeSeries( - name=get_name(time_series), - data=TimeSeries.to(get_data(time_series), timestamp), + return SingleTimeSeries(; + name = get_name(time_series), + data = TimeSeries.to(get_data(time_series), timestamp), ) end @@ -334,7 +334,7 @@ get_columns(::Type{<:TimeSeriesMetadata}, ta::TimeSeries.TimeArray) = nothing function make_time_array( time_series::SingleTimeSeries, start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) ta = get_data(time_series) first_time = first(TimeSeries.timestamp(ta)) @@ -349,13 +349,13 @@ function make_time_array( end function SingleTimeSeriesMetadata(ts_metadata::DeterministicMetadata) - return SingleTimeSeriesMetadata( - name=get_name(ts_metadata), - resolution=get_resolution(ts_metadata), - initial_timestamp=get_initial_timestamp(ts_metadata), - time_series_uuid=get_time_series_uuid(ts_metadata), - length=get_count(ts_metadata) * get_horizon(ts_metadata), - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=get_internal(ts_metadata), + return SingleTimeSeriesMetadata(; + name = get_name(ts_metadata), + resolution = get_resolution(ts_metadata), + initial_timestamp = get_initial_timestamp(ts_metadata), + time_series_uuid = get_time_series_uuid(ts_metadata), + length = get_count(ts_metadata) * get_horizon(ts_metadata), + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = get_internal(ts_metadata), ) end diff --git a/src/supplemental_attribute.jl b/src/supplemental_attribute.jl new file 
mode 100644 index 000000000..02ea656f3 --- /dev/null +++ b/src/supplemental_attribute.jl @@ -0,0 +1,103 @@ +function attach_component!( + attribute::T, + component::InfrastructureSystemsComponent, +) where {T <: InfrastructureSystemsSupplementalAttribute} + component_uuid = get_uuid(component) + + if component_uuid ∈ get_component_uuids(attribute) + throw( + ArgumentError( + "SupplementalAttribute type $T with UUID $(get_uuid(attribute)) already attached to component $(summary(component))", + ), + ) + end + + push!(get_component_uuids(attribute), component_uuid) + return +end + +function detach_component!( + attribute::InfrastructureSystemsSupplementalAttribute, + component::InfrastructureSystemsComponent, +) + delete!(get_component_uuids(attribute), get_uuid(component)) + return +end + +""" +Return true if the attribute has time series data. +""" +function has_time_series(attribute::InfrastructureSystemsSupplementalAttribute) + container = get_time_series_container(attribute) + return !isnothing(container) && !isempty(container) +end + +function clear_time_series_storage!(attribute::InfrastructureSystemsSupplementalAttribute) + storage = _get_time_series_storage(attribute) + if !isnothing(storage) + # In the case of Deterministic and DeterministicSingleTimeSeries the UUIDs + # can be shared. + uuids = Set{Base.UUID}() + for (uuid, name) in get_time_series_uuids(attribute) + if !(uuid in uuids) + remove_time_series!(storage, uuid, get_uuid(attribute), name) + push!(uuids, uuid) + end + end + end +end + +function set_time_series_storage!( + attribute::InfrastructureSystemsSupplementalAttribute, + storage::Union{Nothing, TimeSeriesStorage}, +) + container = get_time_series_container(attribute) + if !isnothing(container) + set_time_series_storage!(container, storage) + end + return +end + +""" +This function must be called when an attribute is removed from a system. 
+""" +function prepare_for_removal!( + attribute::T, +) where {T <: InfrastructureSystemsSupplementalAttribute} + if !isempty(get_component_uuids(attribute)) + throw( + ArgumentError( + "attribute type $T with uuid $(get_uuid(attribute)) still attached to a component", + ), + ) + end + + # TimeSeriesContainer can only be part of a component when that component is part of a + # system. + clear_time_series_storage!(attribute) + set_time_series_storage!(attribute, nothing) + clear_time_series!(attribute) + @debug "cleared all time series data from" _group = LOG_GROUP_SYSTEM get_uuid(attribute) + return +end + +function _get_time_series_storage(attribute::InfrastructureSystemsSupplementalAttribute) + container = get_time_series_container(attribute) + if isnothing(container) + return nothing + end + + return container.time_series_storage +end + +function clear_time_series!( + attribute::T, +) where {T <: InfrastructureSystemsSupplementalAttribute} + container = get_time_series_container(attribute) + if !isnothing(container) + clear_time_series!(container) + @debug "Cleared time_series in attribute type $T, $(get_uuid(attribute))." 
_group = + LOG_GROUP_TIME_SERIES + end + return +end diff --git a/src/supplemental_attributes.jl b/src/supplemental_attributes.jl new file mode 100644 index 000000000..739e934f8 --- /dev/null +++ b/src/supplemental_attributes.jl @@ -0,0 +1,229 @@ +const SupplementalAttributesContainer = + Dict{DataType, Set{<:InfrastructureSystemsSupplementalAttribute}} +const SupplementalAttributesByType = + Dict{DataType, Dict{UUIDs.UUID, <:InfrastructureSystemsSupplementalAttribute}} + +struct SupplementalAttributes <: InfrastructureSystemsContainer + data::SupplementalAttributesByType + time_series_storage::TimeSeriesStorage +end + +get_display_string(::SupplementalAttributes) = "SupplementalAttributes" + +function SupplementalAttributes(time_series_storage::TimeSeriesStorage) + return SupplementalAttributes(SupplementalAttributesByType(), time_series_storage) +end + +function add_supplemental_attribute!( + supplemental_attributes::SupplementalAttributes, + component::InfrastructureSystemsComponent, + supplemental_attribute::InfrastructureSystemsSupplementalAttribute; + kwargs..., +) + try + attach_component!(supplemental_attribute, component) + attach_supplemental_attribute!(component, supplemental_attribute) + _add_supplemental_attribute!( + supplemental_attributes, + supplemental_attribute; + kwargs..., + ) + catch e + detach_component!(supplemental_attribute, component) + detach_supplemental_attribute!(component, supplemental_attribute) + rethrow(e) + end + return +end + +function _add_supplemental_attribute!( + supplemental_attributes::SupplementalAttributes, + supplemental_attribute::T; + allow_existing_time_series = false, +) where {T <: InfrastructureSystemsSupplementalAttribute} + if !isconcretetype(T) + throw(ArgumentError("add_supplemental_attribute! 
only accepts concrete types")) + end + + supplemental_attribute_uuid = get_uuid(supplemental_attribute) + if isempty(get_component_uuids(supplemental_attribute)) + throw( + ArgumentError( + "SupplementalAttribute type $T with UUID $supplemental_attribute_uuid is not attached to any component", + ), + ) + end + + if !haskey(supplemental_attributes.data, T) + supplemental_attributes.data[T] = Dict{UUIDs.UUID, T}() + elseif haskey(supplemental_attributes.data[T], supplemental_attribute_uuid) + @debug "SupplementalAttribute type $T with UUID $supplemental_attribute_uuid already stored" _group = + LOG_GROUP_SYSTEM + return + end + + if !allow_existing_time_series && has_time_series(supplemental_attribute) + throw( + ArgumentError( + "cannot add an supplemental_attribute with time_series: $supplemental_attribute", + ), + ) + end + + set_time_series_storage!( + supplemental_attribute, + supplemental_attributes.time_series_storage, + ) + supplemental_attributes.data[T][supplemental_attribute_uuid] = supplemental_attribute + return +end + +""" +Check to see if supplemental_attribute exists. +""" +function has_supplemental_attributes( + ::Type{T}, + component::InfrastructureSystemsComponent, +) where {T <: InfrastructureSystemsSupplementalAttribute} + supplemental_attributes = get_supplemental_attributes_container(component) + if !isconcretetype(T) + for (k, v) in supplemental_attributes + if !isempty(v) && k <: T + return true + end + end + end + supplemental_attributes = get_supplemental_attributes_container(component) + !haskey(supplemental_attributes, T) && return false + return !isempty(supplemental_attributes[T]) +end + +""" +Iterates over all supplemental_attributes. 
+ +# Examples + +```Julia +for supplemental_attribute in iterate_supplemental_attributes(obj) + @show supplemental_attribute +end +``` +""" +function iterate_supplemental_attributes(supplemental_attributes::SupplementalAttributes) + iterate_container(supplemental_attributes) +end + +function iterate_supplemental_attributes_with_time_series( + supplemental_attributes::SupplementalAttributes, +) + iterate_container_with_time_series(supplemental_attributes) +end + +""" +Returns the total number of stored supplemental_attributes +""" +function get_num_supplemental_attributes(supplemental_attributes::SupplementalAttributes) + return get_num_members(supplemental_attributes) +end + +""" +Removes all supplemental_attributes from the system. +""" +function clear_supplemental_attributes!(supplemental_attributes::SupplementalAttributes) + for type in collect(keys(supplemental_attributes.data)) + remove_supplemental_attributes!(type, supplemental_attributes) + end +end + +function remove_supplemental_attribute!( + supplemental_attributes::SupplementalAttributes, + supplemental_attribute::T, +) where {T <: InfrastructureSystemsSupplementalAttribute} + if !isempty(get_component_uuids(supplemental_attribute)) + throw( + ArgumentError( + "SupplementalAttribute type $T with uuid $(get_uuid(supplemental_attribute)) still attached to devices $(get_component_uuids(supplemental_attribute))", + ), + ) + end + + pop!(supplemental_attributes.data[T], get_uuid(supplemental_attribute)) + if isempty(supplemental_attributes.data[T]) + pop!(supplemental_attributes.data, T) + end + return +end + +""" +Remove all supplemental_attributes of type T. + +Throws ArgumentError if the type is not stored. 
+""" +function remove_supplemental_attributes!( + ::Type{T}, + supplemental_attributes::SupplementalAttributes, +) where {T <: InfrastructureSystemsSupplementalAttribute} + if !haskey(supplemental_attributes.data, T) + throw(ArgumentError("supplemental_attribute type $T is not stored")) + end + + _supplemental_attributes = pop!(supplemental_attributes.data, T) + for supplemental_attribute in values(_supplemental_attributes) + prepare_for_removal!(supplemental_attribute) + end + + @debug "Removed all supplemental_attributes of type $T" _group = LOG_GROUP_SYSTEM T + return values(_supplemental_attributes) +end + +# TODO: This function could be merged with the getter for components if no additional functionality is needed +""" +Returns an iterator of supplemental_attributes. T can be concrete or abstract. +Call collect on the result if an array is desired. + +# Arguments + + - `T`: supplemental_attribute type + - `supplemental_attributes::SupplementalAttributes`: SupplementalAttributes in the system + - `filter_func::Union{Nothing, Function} = nothing`: Optional function that accepts a component + of type T and returns a Bool. Apply this function to each component and only return components + where the result is true. 
+""" +function get_supplemental_attributes( + ::Type{T}, + supplemental_attributes::SupplementalAttributes, + filter_func::Union{Nothing, Function} = nothing, +) where {T <: InfrastructureSystemsSupplementalAttribute} + if isconcretetype(T) + _supplemental_attributes = get(supplemental_attributes.data, T, nothing) + if !isnothing(filter_func) && !isnothing(_supplemental_attributes) + _filter_func = x -> filter_func(x.second) + _supplemental_attributes = + values(filter(_filter_func, _supplemental_attributes)) + end + if isnothing(_supplemental_attributes) + iter = FlattenIteratorWrapper(T, Vector{Base.ValueIterator}([])) + else + iter = FlattenIteratorWrapper( + T, + Vector{Base.ValueIterator}([values(_supplemental_attributes)]), + ) + end + else + types = [x for x in keys(supplemental_attributes.data) if x <: T] + if isnothing(filter_func) + _supplemental_attributes = + [values(supplemental_attributes.data[x]) for x in types] + else + _filter_func = x -> filter_func(x.second) + _supplemental_attributes = [ + values(filter(_filter_func, supplemental_attributes.data[x])) for + x in types + ] + end + iter = FlattenIteratorWrapper(T, _supplemental_attributes) + end + + @assert_op eltype(iter) == T + return iter +end diff --git a/src/system_data.jl b/src/system_data.jl index b68aac9ff..a96822971 100644 --- a/src/system_data.jl +++ b/src/system_data.jl @@ -22,6 +22,7 @@ Container for system components and time series data mutable struct SystemData <: InfrastructureSystemsType components::Components masked_components::Components + attributes::SupplementalAttributes time_series_params::TimeSeriesParameters time_series_storage::TimeSeriesStorage validation_descriptors::Vector @@ -42,10 +43,10 @@ Construct SystemData to store components and time series data. - `compression = CompressionSettings()`: Controls compression of time series data. 
""" function SystemData(; - validation_descriptor_file=nothing, - time_series_in_memory=false, - time_series_directory=nothing, - compression=CompressionSettings(), + validation_descriptor_file = nothing, + time_series_in_memory = false, + time_series_directory = nothing, + compression = CompressionSettings(), ) if isnothing(validation_descriptor_file) validation_descriptors = Vector() @@ -58,15 +59,17 @@ function SystemData(; end ts_storage = make_time_series_storage(; - in_memory=time_series_in_memory, - directory=time_series_directory, - compression=compression, + in_memory = time_series_in_memory, + directory = time_series_directory, + compression = compression, ) components = Components(ts_storage, validation_descriptors) + attributes = SupplementalAttributes(ts_storage) masked_components = Components(ts_storage, validation_descriptors) return SystemData( components, masked_components, + attributes, TimeSeriesParameters(), ts_storage, validation_descriptors, @@ -85,6 +88,7 @@ function SystemData( return SystemData( components, masked_components, + SupplementalAttributes(time_series_storage), time_series_params, time_series_storage, validation_descriptors, @@ -107,10 +111,10 @@ function add_time_series_from_file_metadata!( data::SystemData, ::Type{T}, metadata_file::AbstractString; - resolution=nothing, + resolution = nothing, ) where {T <: InfrastructureSystemsComponent} metadata = read_time_series_file_metadata(metadata_file) - return add_time_series_from_file_metadata!(data, T, metadata; resolution=resolution) + return add_time_series_from_file_metadata!(data, T, metadata; resolution = resolution) end """ @@ -126,7 +130,7 @@ function add_time_series_from_file_metadata!( data::SystemData, ::Type{T}, file_metadata::Vector{TimeSeriesFileMetadata}; - resolution=nothing, + resolution = nothing, ) where {T <: InfrastructureSystemsComponent} cache = TimeSeriesParsingCache() for metadata in file_metadata @@ -152,16 +156,46 @@ function add_time_series!( 
data::SystemData, component::InfrastructureSystemsComponent, time_series::TimeSeriesData; - skip_if_present=false, + skip_if_present = false, ) metadata_type = time_series_data_to_metadata(typeof(time_series)) ts_metadata = metadata_type(time_series) - _attach_time_series_and_serialize!( + _validate_component(data, component) + attach_time_series_and_serialize!( data, component, ts_metadata, time_series; - skip_if_present=skip_if_present, + skip_if_present = skip_if_present, + ) + return +end + +""" +Add time series data to an attribute. + +# Arguments + + - `data::SystemData`: SystemData + - `component::InfrastructureSystemsComponent`: will store the time series reference + - `time_series::TimeSeriesData`: Any object of subtype TimeSeriesData + +Throws ArgumentError if the component is not stored in the system. +""" +function add_time_series!( + data::SystemData, + component::InfrastructureSystemsSupplementalAttribute, + time_series::TimeSeriesData; + skip_if_present = false, +) + metadata_type = time_series_data_to_metadata(typeof(time_series)) + ts_metadata = metadata_type(time_series) + attach_time_series_and_serialize!( + data, + component, + ts_metadata, + time_series; + skip_if_present = skip_if_present, ) return end @@ -184,37 +218,10 @@ function add_time_series!(data::SystemData, components, time_series::TimeSeriesD metadata_type = time_series_data_to_metadata(typeof(time_series)) ts_metadata = metadata_type(time_series) for component in components - _attach_time_series_and_serialize!(data, component, ts_metadata, time_series) + attach_time_series_and_serialize!(data, component, ts_metadata, time_series) end end -function _attach_time_series_and_serialize!( - data::SystemData, - component::InfrastructureSystemsComponent, - ts_metadata::T, - ts::TimeSeriesData; - skip_if_present=false, -) where {T <: TimeSeriesMetadata} - _validate_component(data, component) - check_add_time_series(data.time_series_params, ts) - check_read_only(data.time_series_storage) - 
if has_time_series(component, T, get_name(ts)) - skip_if_present && return - throw(ArgumentError("time_series $(typeof(ts)) $(get_name(ts)) is already stored")) - end - - serialize_time_series!( - data.time_series_storage, - get_uuid(component), - get_name(ts_metadata), - ts, - ) - add_time_series!(component, ts_metadata, skip_if_present=skip_if_present) - # Order is important. Set this last in case exceptions are thrown at previous steps. - set_parameters!(data.time_series_params, ts) - return -end - function add_time_series_from_file_metadata_internal!( data::SystemData, ::Type{T}, @@ -316,7 +323,7 @@ function _validate_component( end end -function compare_values(x::SystemData, y::SystemData; compare_uuids=false) +function compare_values(x::SystemData, y::SystemData; compare_uuids = false) match = true for name in fieldnames(SystemData) val_x = getfield(x, name) @@ -327,7 +334,7 @@ function compare_values(x::SystemData, y::SystemData; compare_uuids=false) # InMemoryTimeSeriesStorage continue end - if !compare_values(val_x, val_y, compare_uuids=compare_uuids) + if !compare_values(val_x, val_y; compare_uuids = compare_uuids) @error "SystemData field = $name does not match" getfield(x, name) getfield( y, name, @@ -355,13 +362,13 @@ end Removes the component from the main container and adds it to the masked container. """ function mask_component!(data::SystemData, component::InfrastructureSystemsComponent) - remove_component!(data.components, component, remove_time_series=false) + remove_component!(data.components, component; remove_time_series = false) set_time_series_storage!(component, nothing) return add_masked_component!( data, - component, - skip_validation=true, # validation has already occurred - allow_existing_time_series=true, + component; + skip_validation = true, # validation has already occurred + allow_existing_time_series = true, ) end @@ -422,14 +429,14 @@ Call `collect` on the result to get an array. 
""" function get_time_series_multiple( data::SystemData, - filter_func=nothing; - type=nothing, - name=nothing, + filter_func = nothing; + type = nothing, + name = nothing, ) Channel() do channel for component in iterate_components_with_time_series(data) for time_series in - get_time_series_multiple(component, filter_func; type=type, name=name) + get_time_series_multiple(component, filter_func; type = type, name = name) put!(channel, time_series) end end @@ -567,7 +574,11 @@ end Parent object should call this prior to serialization so that SystemData can store the appropriate path information for the time series data. """ -function prepare_for_serialization!(data::SystemData, filename::AbstractString; force=false) +function prepare_for_serialization!( + data::SystemData, + filename::AbstractString; + force = false, +) directory = dirname(filename) if !isdir(directory) mkpath(directory) @@ -634,8 +645,8 @@ end function deserialize( ::Type{SystemData}, raw::Dict; - time_series_read_only=false, - time_series_directory=nothing, + time_series_read_only = false, + time_series_directory = nothing, ) @debug "deserialize" raw _group = LOG_GROUP_SERIALIZATION time_series_params = deserialize(TimeSeriesParameters, raw["time_series_params"]) @@ -660,14 +671,14 @@ function deserialize( time_series_storage = from_file( Hdf5TimeSeriesStorage, raw["time_series_storage_file"]; - read_only=time_series_read_only, - directory=time_series_directory, + read_only = time_series_read_only, + directory = time_series_directory, ) else - time_series_storage = make_time_series_storage( - in_memory=raw["time_series_compression_enabled"], - compression=CompressionSettings(enabled=raw["time_series_in_memory"]), - directory=time_series_directory, + time_series_storage = make_time_series_storage(; + in_memory = raw["time_series_compression_enabled"], + compression = CompressionSettings(; enabled = raw["time_series_in_memory"]), + directory = time_series_directory, ) end @@ -692,7 +703,7 @@ 
add_component!(data::SystemData, component; kwargs...) = add_masked_component!(data::SystemData, component; kwargs...) = add_component!( data.masked_components, component; - allow_existing_time_series=true, + allow_existing_time_series = true, kwargs..., ) @@ -720,7 +731,7 @@ end function get_components( ::Type{T}, data::SystemData, - filter_func::Union{Nothing, Function}=nothing, + filter_func::Union{Nothing, Function} = nothing, ) where {T} return get_components(T, data.components, filter_func) end @@ -731,7 +742,7 @@ get_components_by_name(::Type{T}, data::SystemData, args...) where {T} = function get_masked_components( ::Type{T}, data::SystemData, - filter_func::Union{Nothing, Function}=nothing, + filter_func::Union{Nothing, Function} = nothing, ) where {T} return get_components(T, data.masked_components, filter_func) end @@ -811,3 +822,68 @@ end _get_system_basename(system_file) = splitext(basename(system_file))[1] _get_secondary_basename(system_basename, name) = system_basename * "_" * name + +add_supplemental_attribute!(data::SystemData, component, info; kwargs...) = + add_supplemental_attribute!(data.attributes, component, info; kwargs...) 
+ +function get_supplemental_attributes( + filter_func::Function, + ::Type{T}, + data::SystemData, +) where {T <: InfrastructureSystemsSupplementalAttribute} + return get_supplemental_attributes(T, data.attributes, filter_func) +end + +function get_supplemental_attributes( + ::Type{T}, + data::SystemData, +) where {T <: InfrastructureSystemsSupplementalAttribute} + return get_supplemental_attributes(T, data.attributes) +end + +function iterate_supplemental_attributes(data::SystemData) + return iterate_supplemental_attributes(data.attributes) +end + +function remove_supplemental_attribute!( + data::SystemData, + component::InfrastructureSystemsComponent, + attribute::InfrastructureSystemsSupplementalAttribute, +) + detach_component!(attribute, component) + detach_supplemental_attribute!(component, attribute) + clear_time_series_storage!(attribute) + remove_supplemental_attribute!(data.attributes, attribute) + return +end + +function remove_supplemental_attribute!( + data::SystemData, + attribute::InfrastructureSystemsSupplementalAttribute, +) + current_components_uuid = collect(get_component_uuids(attribute)) + for c_uuid in current_components_uuid + component = get_component(data, c_uuid) + detach_component!(attribute, component) + detach_supplemental_attribute!(component, attribute) + end + clear_time_series_storage!(attribute) + return remove_supplemental_attribute!(data.attributes, attribute) +end + +function remove_supplemental_attributes!( + ::Type{T}, + data::SystemData, +) where {T <: InfrastructureSystemsSupplementalAttribute} + attributes = get_supplemental_attributes(T, data.attributes) + for attribute in attributes + for c_uuid in get_component_uuids(attribute) + component = get_component(data, c_uuid) + detach_component!(attribute, component) + detach_supplemental_attribute!(component, attribute) + end + remove_supplemental_attribute!(data.attributes, attribute) + clear_time_series_storage!(attribute) + end + return +end diff --git 
a/src/time_series_cache.jl b/src/time_series_cache.jl index 1fb63d730..cc9ded7ec 100644 --- a/src/time_series_cache.jl +++ b/src/time_series_cache.jl @@ -2,7 +2,7 @@ const TIME_SERIES_CACHE_SIZE_BYTES = 1024 * 1024 abstract type TimeSeriesCache end -function Base.iterate(cache::TimeSeriesCache, state=nothing) +function Base.iterate(cache::TimeSeriesCache, state = nothing) if state === nothing reset!(cache) end @@ -52,8 +52,8 @@ function get_time_series_array!(cache::TimeSeriesCache, timestamp::Dates.DateTim _get_component(cache), _get_time_series(cache), next_time; - len=len, - ignore_scaling_factors=_get_ignore_scaling_factors(cache), + len = len, + ignore_scaling_factors = _get_ignore_scaling_factors(cache), ) _increment_next_time!(cache, len) _decrement_iterations_remaining!(cache) @@ -200,10 +200,10 @@ function ForecastCache( ::Type{T}, component::InfrastructureSystemsComponent, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - horizon::Union{Nothing, Int}=nothing, - cache_size_bytes=TIME_SERIES_CACHE_SIZE_BYTES, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing, + horizon::Union{Nothing, Int} = nothing, + cache_size_bytes = TIME_SERIES_CACHE_SIZE_BYTES, + ignore_scaling_factors = false, ) where {T <: Forecast} metadata_type = time_series_data_to_metadata(T) ts_metadata = get_time_series_metadata(metadata_type, component, name) @@ -220,10 +220,10 @@ function ForecastCache( T, component, name; - start_time=start_time, - len=get_horizon(ts_metadata), + start_time = start_time, + len = get_horizon(ts_metadata), ) - vals = get_time_series_values(component, ts, start_time, len=get_horizon(ts_metadata)) + vals = get_time_series_values(component, ts, start_time; len = get_horizon(ts_metadata)) row_size = _get_row_size(vals) count = get_count(ts_metadata) @@ -238,14 +238,14 @@ function ForecastCache( @debug "ForecastCache" _group = LOG_GROUP_TIME_SERIES row_size window_size in_memory_count return 
ForecastCache( - TimeSeriesCacheCommon( - ts=ts, - component=component, - name=name, - next_time=start_time, - len=count, - num_iterations=count, - ignore_scaling_factors=ignore_scaling_factors, + TimeSeriesCacheCommon(; + ts = ts, + component = component, + name = name, + next_time = start_time, + len = count, + num_iterations = count, + ignore_scaling_factors = ignore_scaling_factors, ), in_memory_count, horizon, @@ -270,9 +270,9 @@ function _update!(cache::ForecastCache) _get_type(cache), _get_component(cache), _get_name(cache); - start_time=next_time, - len=len, - count=count, + start_time = next_time, + len = len, + count = count, ) _set_length_available!(cache, len) _set_time_series!(cache, ts) @@ -318,9 +318,9 @@ function StaticTimeSeriesCache( ::Type{T}, component::InfrastructureSystemsComponent, name::AbstractString; - cache_size_bytes=TIME_SERIES_CACHE_SIZE_BYTES, - start_time::Union{Nothing, Dates.DateTime}=nothing, - ignore_scaling_factors=false, + cache_size_bytes = TIME_SERIES_CACHE_SIZE_BYTES, + start_time::Union{Nothing, Dates.DateTime} = nothing, + ignore_scaling_factors = false, ) where {T <: StaticTimeSeries} metadata_type = time_series_data_to_metadata(T) ts_metadata = get_time_series_metadata(metadata_type, component, name) @@ -335,8 +335,8 @@ function StaticTimeSeriesCache( end # Get an instance to assess data size. 
- ts = get_time_series(T, component, name; start_time=start_time, len=1) - vals = get_time_series_values(component, ts, start_time, len=1) + ts = get_time_series(T, component, name; start_time = start_time, len = 1) + vals = get_time_series_values(component, ts, start_time; len = 1) row_size = _get_row_size(vals) if row_size > cache_size_bytes @@ -347,14 +347,14 @@ function StaticTimeSeriesCache( @debug "StaticTimeSeriesCache" _group = LOG_GROUP_TIME_SERIES total_length in_memory_rows return StaticTimeSeriesCache( - TimeSeriesCacheCommon( - ts=ts, - component=component, - name=name, - next_time=start_time, - len=total_length, - num_iterations=total_length, - ignore_scaling_factors=ignore_scaling_factors, + TimeSeriesCacheCommon(; + ts = ts, + component = component, + name = name, + next_time = start_time, + len = total_length, + num_iterations = total_length, + ignore_scaling_factors = ignore_scaling_factors, ), in_memory_rows, ) @@ -374,8 +374,8 @@ function _update!(cache::StaticTimeSeriesCache) _get_type(cache), _get_component(cache), _get_name(cache); - start_time=next_time, - len=len, + start_time = next_time, + len = len, ) _set_length_available!(cache, len) _set_time_series!(cache, ts) diff --git a/src/time_series_container.jl b/src/time_series_container.jl index 0e67a1835..5db13fa27 100644 --- a/src/time_series_container.jl +++ b/src/time_series_container.jl @@ -49,7 +49,7 @@ end function add_time_series!( container::TimeSeriesContainer, ts_metadata::T; - skip_if_present=false, + skip_if_present = false, ) where {T <: TimeSeriesMetadata} key = TimeSeriesKey(T, get_name(ts_metadata)) if haskey(container.data, key) diff --git a/src/time_series_formats.jl b/src/time_series_formats.jl index 96c898318..ea9fa5687 100644 --- a/src/time_series_formats.jl +++ b/src/time_series_formats.jl @@ -15,7 +15,7 @@ Pass component_name when the file does not have the component name in a column h function read_time_series( ::Type{T}, data_file::AbstractString, - 
component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesData} if !isfile(data_file) @@ -175,7 +175,7 @@ function read_time_series( ::Type{T}, ::Type{Deterministic}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesFormatDateTimeAsColumn} @debug "Read CSV data from $file." _group = LOG_GROUP_TIME_SERIES @@ -202,7 +202,7 @@ function read_time_series( ::Type{T}, ::Type{<:StaticTimeSeries}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: Union{TimeSeriesFormatPeriodAsColumn, TimeSeriesFormatDateTimeAsColumn}} first_timestamp = get_timestamp(T, file, 1) @@ -248,7 +248,7 @@ function read_time_series( ::Type{T}, ::Type{<:StaticTimeSeries}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesFormatComponentsAsColumnsNoTime} first_timestamp = get(kwargs, :start_datetime, Dates.DateTime(Dates.today())) diff --git a/src/time_series_interface.jl b/src/time_series_interface.jl new file mode 100644 index 000000000..6eadc8081 --- /dev/null +++ b/src/time_series_interface.jl @@ -0,0 +1,606 @@ +const SupportedTimeSeriesTypes = + Union{InfrastructureSystemsComponent, InfrastructureSystemsSupplementalAttribute} + +function add_time_series!( + component::T, + time_series::TimeSeriesMetadata; + skip_if_present = false, +) where {T <: SupportedTimeSeriesTypes} + component_id = get_uuid(component) + container = get_time_series_container(component) + if isnothing(container) + throw(ArgumentError("type $T does not support storing time series")) + end + + add_time_series!(container, time_series; skip_if_present = skip_if_present) + @debug "Added $time_series to $(typeof(component)) $(component_id) " * + "num_time_series=$(length(get_time_series_container(component).data))." _group = + LOG_GROUP_TIME_SERIES +end + +""" +Removes the metadata for a time_series. 
+If this returns true then the caller must also remove the actual time series data. +""" +function remove_time_series_metadata!( + component::SupportedTimeSeriesTypes, + ::Type{T}, + name::AbstractString, +) where {T <: TimeSeriesMetadata} + container = get_time_series_container(component) + remove_time_series!(container, T, name) + @debug "Removed time_series from $(get_name(component)): $name." _group = + LOG_GROUP_TIME_SERIES + if T <: DeterministicMetadata && + has_time_series_internal(container, SingleTimeSeriesMetadata, name) + return false + elseif T <: SingleTimeSeriesMetadata && + has_time_series_internal(container, DeterministicMetadata, name) + return false + end + + return true +end + +function clear_time_series!(component::SupportedTimeSeriesTypes) + container = get_time_series_container(component) + if !isnothing(container) + clear_time_series!(container) + @debug "Cleared time_series in $(get_name(component))." _group = + LOG_GROUP_TIME_SERIES + end + return +end + +function _get_columns(start_time, count, ts_metadata::ForecastMetadata) + offset = start_time - get_initial_timestamp(ts_metadata) + interval = time_period_conversion(get_interval(ts_metadata)) + window_count = get_count(ts_metadata) + if window_count > 1 + index = Int(offset / interval) + 1 + else + index = 1 + end + if count === nothing + count = window_count - index + 1 + end + + if index + count - 1 > get_count(ts_metadata) + throw( + ArgumentError( + "The requested start_time $start_time and count $count are invalid", + ), + ) + end + return UnitRange(index, index + count - 1) +end + +_get_columns(start_time, count, ts_metadata::StaticTimeSeriesMetadata) = UnitRange(1, 1) + +function _get_rows(start_time, len, ts_metadata::StaticTimeSeriesMetadata) + index = + Int( + (start_time - get_initial_timestamp(ts_metadata)) / get_resolution(ts_metadata), + ) + 1 + if len === nothing + len = length(ts_metadata) - index + 1 + end + if index + len - 1 > length(ts_metadata) + throw( + 
ArgumentError( + "The requested index=$index len=$len exceeds the range $(length(ts_metadata))", + ), + ) + end + + return UnitRange(index, index + len - 1) +end + +function _get_rows(start_time, len, ts_metadata::ForecastMetadata) + if len === nothing + len = get_horizon(ts_metadata) + end + + return UnitRange(1, len) +end + +function _check_start_time(start_time, ts_metadata::TimeSeriesMetadata) + if start_time === nothing + return get_initial_timestamp(ts_metadata) + end + + time_diff = start_time - get_initial_timestamp(ts_metadata) + if time_diff < Dates.Second(0) + throw( + ArgumentError( + "start_time=$start_time is earlier than $(get_initial_timestamp(ts_metadata))", + ), + ) + end + + if typeof(ts_metadata) <: ForecastMetadata + window_count = get_count(ts_metadata) + interval = get_interval(ts_metadata) + if window_count > 1 && + Dates.Millisecond(time_diff) % Dates.Millisecond(interval) != Dates.Second(0) + throw( + ArgumentError( + "start_time=$start_time is not on a multiple of interval=$interval", + ), + ) + end + end + + return start_time +end + +""" +Return a time series corresponding to the given parameters. + +# Arguments + + - `::Type{T}`: Concrete subtype of TimeSeriesData to return + - `component::SupportedTimeSeriesTypes`: Component containing the time series + - `name::AbstractString`: name of time series + - `start_time::Union{Nothing, Dates.DateTime} = nothing`: If nothing, use the + `initial_timestamp` of the time series. If T is a subtype of Forecast then `start_time` + must be the first timstamp of a window. + - `len::Union{Nothing, Int} = nothing`: Length in the time dimension. If nothing, use the + entire length. + - `count::Union{Nothing, Int} = nothing`: Only applicable to subtypes of Forecast. Number + of forecast windows starting at `start_time` to return. Defaults to all available. 
+""" +function get_time_series( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString; + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + count::Union{Nothing, Int} = nothing, +) where {T <: TimeSeriesData} + if !has_time_series(component) + throw(ArgumentError("no forecasts are stored in $component")) + end + + metadata_type = time_series_data_to_metadata(T) + ts_metadata = get_time_series_metadata(metadata_type, component, name) + start_time = _check_start_time(start_time, ts_metadata) + rows = _get_rows(start_time, len, ts_metadata) + columns = _get_columns(start_time, count, ts_metadata) + storage = _get_time_series_storage(component) + return deserialize_time_series(T, storage, ts_metadata, rows, columns) +end + +function get_time_series_uuid( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString, +) where {T <: TimeSeriesData} + metadata_type = time_series_data_to_metadata(T) + metadata = get_time_series_metadata(metadata_type, component, name) + return get_time_series_uuid(metadata) +end + +function get_time_series_metadata( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString, +) where {T <: TimeSeriesMetadata} + return get_time_series_metadata(T, get_time_series_container(component), name) +end + +""" +Return a TimeSeries.TimeArray from storage for the given time series parameters. + +If the data are scaling factors then the stored scaling_factor_multiplier will be called on +the component and applied to the data unless ignore_scaling_factors is true. 
+""" +function get_time_series_array( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString; + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, +) where {T <: TimeSeriesData} + ts = get_time_series(T, component, name; start_time = start_time, len = len, count = 1) + if start_time === nothing + start_time = get_initial_timestamp(ts) + end + + return get_time_series_array( + component, + ts, + start_time; + len = len, + ignore_scaling_factors = ignore_scaling_factors, + ) +end + +""" +Return a TimeSeries.TimeArray for one forecast window from a cached Forecast instance. + +If the data are scaling factors then the stored scaling_factor_multiplier will be called on +the component and applied to the data unless ignore_scaling_factors is true. + +See also [`ForecastCache`](@ref). +""" +function get_time_series_array( + component::SupportedTimeSeriesTypes, + forecast::Forecast, + start_time::Dates.DateTime; + len = nothing, + ignore_scaling_factors = false, +) + return _make_time_array(component, forecast, start_time, len, ignore_scaling_factors) +end + +""" +Return a TimeSeries.TimeArray from a cached StaticTimeSeries instance. + +If the data are scaling factors then the stored scaling_factor_multiplier will be called on +the component and applied to the data unless ignore_scaling_factors is true. + +See also [`StaticTimeSeriesCache`](@ref). 
+""" +function get_time_series_array( + component::SupportedTimeSeriesTypes, + time_series::StaticTimeSeries, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, +) + if start_time === nothing + start_time = get_initial_timestamp(time_series) + end + + if len === nothing + len = length(time_series) + end + + return _make_time_array(component, time_series, start_time, len, ignore_scaling_factors) +end + +""" +Return a vector of timestamps from storage for the given time series parameters. +""" +function get_time_series_timestamps( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString; + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, +) where {T <: TimeSeriesData} + return TimeSeries.timestamp( + get_time_series_array(T, component, name; start_time = start_time, len = len), + ) +end + +""" +Return a vector of timestamps from a cached Forecast instance. +""" +function get_time_series_timestamps( + component::SupportedTimeSeriesTypes, + forecast::Forecast, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, +) + return TimeSeries.timestamp( + get_time_series_array(component, forecast, start_time; len = len), + ) +end + +""" +Return a vector of timestamps from a cached StaticTimeSeries instance. +""" +function get_time_series_timestamps( + component::SupportedTimeSeriesTypes, + time_series::StaticTimeSeries, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, +) + return TimeSeries.timestamp( + get_time_series_array(component, time_series, start_time; len = len), + ) +end + +""" +Return an Array of values from storage for the requested time series parameters. + +If the data size is small and this will be called many times, consider using the version +that accepts a cached TimeSeriesData instance. 
+""" +function get_time_series_values( + ::Type{T}, + component::SupportedTimeSeriesTypes, + name::AbstractString; + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, +) where {T <: TimeSeriesData} + return TimeSeries.values( + get_time_series_array( + T, + component, + name; + start_time = start_time, + len = len, + ignore_scaling_factors = ignore_scaling_factors, + ), + ) +end + +""" +Return an Array of values for one forecast window from a cached Forecast instance. +""" +function get_time_series_values( + component::SupportedTimeSeriesTypes, + forecast::Forecast, + start_time::Dates.DateTime; + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, +) + return TimeSeries.values( + get_time_series_array( + component, + forecast, + start_time; + len = len, + ignore_scaling_factors = ignore_scaling_factors, + ), + ) +end + +""" +Return an Array of values from a cached StaticTimeSeries instance for the requested time +series parameters. +""" +function get_time_series_values( + component::SupportedTimeSeriesTypes, + time_series::StaticTimeSeries, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, +) + return TimeSeries.values( + get_time_series_array( + component, + time_series, + start_time; + len = len, + ignore_scaling_factors = ignore_scaling_factors, + ), + ) +end + +function _make_time_array(component, time_series, start_time, len, ignore_scaling_factors) + ta = make_time_array(time_series, start_time; len = len) + if ignore_scaling_factors + return ta + end + + multiplier = get_scaling_factor_multiplier(time_series) + if multiplier === nothing + return ta + end + + return ta .* multiplier(component) +end + +""" +Return true if the component has time series data. 
+""" +function has_time_series(component::SupportedTimeSeriesTypes) + container = get_time_series_container(component) + return !isnothing(container) && !isempty(container) +end + +""" +Return true if the component has time series data of type T. +""" +function has_time_series( + component::SupportedTimeSeriesTypes, + ::Type{T}, +) where {T <: TimeSeriesData} + container = get_time_series_container(component) + if container === nothing + return false + end + + for key in keys(container.data) + if isabstracttype(T) + if is_time_series_sub_type(key.time_series_type, T) + return true + end + elseif time_series_data_to_metadata(T) <: key.time_series_type + return true + end + end + + return false +end + +function has_time_series( + component::SupportedTimeSeriesTypes, + type::Type{<:TimeSeriesMetadata}, + name::AbstractString, +) + container = get_time_series_container(component) + container === nothing && return false + return has_time_series_internal(container, type, name) +end + +""" +Efficiently add all time_series in one component to another by copying the underlying +references. + +# Arguments + + - `dst::SupportedTimeSeriesTypes`: Destination component + - `src::SupportedTimeSeriesTypes`: Source component + - `name_mapping::Dict = nothing`: Optionally map src names to different dst names. + If provided and src has a time_series with a name not present in name_mapping, that + time_series will not copied. If name_mapping is nothing then all time_series will be + copied with src's names. + - `scaling_factor_multiplier_mapping::Dict = nothing`: Optionally map src multipliers to + different dst multipliers. If provided and src has a time_series with a multiplier not + present in scaling_factor_multiplier_mapping, that time_series will not copied. If + scaling_factor_multiplier_mapping is nothing then all time_series will be copied with + src's multipliers. 
+""" +function copy_time_series!( + dst::SupportedTimeSeriesTypes, + src::SupportedTimeSeriesTypes; + name_mapping::Union{Nothing, Dict{Tuple{String, String}, String}} = nothing, + scaling_factor_multiplier_mapping::Union{Nothing, Dict{String, String}} = nothing, +) + storage = _get_time_series_storage(dst) + if isnothing(storage) + throw( + ArgumentError( + "Component does not have time series storage. " * + "It may not be attached to the system.", + ), + ) + end + + # There may be time series that share time series arrays as a result of + # transform_single_time_series! being called. + # Don't add these references to the storage more than once. + refs = Set{Tuple{String, Base.UUID}}() + + for ts_metadata in get_time_series_multiple(TimeSeriesMetadata, src) + name = get_name(ts_metadata) + new_name = name + if !isnothing(name_mapping) + new_name = get(name_mapping, (get_name(src), name), nothing) + if isnothing(new_name) + @debug "Skip copying ts_metadata" _group = LOG_GROUP_TIME_SERIES name + continue + end + @debug "Copy ts_metadata with" _group = LOG_GROUP_TIME_SERIES new_name + end + multiplier = get_scaling_factor_multiplier(ts_metadata) + new_multiplier = multiplier + if !isnothing(scaling_factor_multiplier_mapping) + new_multiplier = get(scaling_factor_multiplier_mapping, multiplier, nothing) + if isnothing(new_multiplier) + @debug "Skip copying ts_metadata" _group = LOG_GROUP_TIME_SERIES multiplier + continue + end + @debug "Copy ts_metadata with" _group = LOG_GROUP_TIME_SERIES new_multiplier + end + new_time_series = deepcopy(ts_metadata) + assign_new_uuid!(new_time_series) + set_name!(new_time_series, new_name) + set_scaling_factor_multiplier!(new_time_series, new_multiplier) + add_time_series!(dst, new_time_series) + ts_uuid = get_time_series_uuid(new_time_series) + ref = (new_name, ts_uuid) + if !in(ref, refs) + add_time_series_reference!(storage, get_uuid(dst), new_name, ts_uuid) + push!(refs, ref) + end + end +end + +function 
get_time_series_keys(component::SupportedTimeSeriesTypes) + return keys(get_time_series_container(component).data) +end + +function list_time_series_metadata(component::SupportedTimeSeriesTypes) + return collect(values(get_time_series_container(component).data)) +end + +function get_time_series_names( + ::Type{T}, + component::SupportedTimeSeriesTypes, +) where {T <: TimeSeriesData} + return get_time_series_names( + time_series_data_to_metadata(T), + get_time_series_container(component), + ) +end + +function get_num_time_series(component::SupportedTimeSeriesTypes) + container = get_time_series_container(component) + if isnothing(container) + return (0, 0) + end + + static_ts_count = 0 + forecast_count = 0 + for key in keys(container.data) + if key.time_series_type <: StaticTimeSeriesMetadata + static_ts_count += 1 + elseif key.time_series_type <: ForecastMetadata + forecast_count += 1 + else + error("panic") + end + end + + return (static_ts_count, forecast_count) +end + +function get_num_time_series_by_type(component::SupportedTimeSeriesTypes) + counts = Dict{String, Int}() + container = get_time_series_container(component) + if isnothing(container) + return counts + end + + for metadata in values(container.data) + type = string(nameof(time_series_metadata_to_data(metadata))) + if haskey(counts, type) + counts[type] += 1 + else + counts[type] = 1 + end + end + + return counts +end + +function get_time_series( + component::SupportedTimeSeriesTypes, + time_series::TimeSeriesData, +) + storage = _get_time_series_storage(component) + return get_time_series(storage, get_time_series_uuid(time_series)) +end + +function get_time_series_uuids(component::SupportedTimeSeriesTypes) + container = get_time_series_container(component) + + return [ + (get_time_series_uuid(container.data[key]), key.name) for + key in get_time_series_keys(component) + ] +end + +function attach_time_series_and_serialize!( + data::SystemData, + component::SupportedTimeSeriesTypes, + ts_metadata::T, + 
ts::TimeSeriesData; + skip_if_present = false, +) where {T <: TimeSeriesMetadata} + check_add_time_series(data.time_series_params, ts) + check_read_only(data.time_series_storage) + if has_time_series(component, T, get_name(ts)) + skip_if_present && return + throw(ArgumentError("time_series $(typeof(ts)) $(get_name(ts)) is already stored")) + end + + serialize_time_series!( + data.time_series_storage, + get_uuid(component), + get_name(ts_metadata), + ts, + ) + add_time_series!(component, ts_metadata; skip_if_present = skip_if_present) + # Order is important. Set this last in case exceptions are thrown at previous steps. + set_parameters!(data.time_series_params, ts) + return +end diff --git a/src/time_series_parameters.jl b/src/time_series_parameters.jl index 43747031c..2bf6e79da 100644 --- a/src/time_series_parameters.jl +++ b/src/time_series_parameters.jl @@ -10,10 +10,10 @@ mutable struct ForecastParameters <: InfrastructureSystemsType end function ForecastParameters(; - horizon=UNINITIALIZED_LENGTH, - initial_timestamp=UNINITIALIZED_DATETIME, - interval=UNINITIALIZED_PERIOD, - count=UNINITIALIZED_LENGTH, + horizon = UNINITIALIZED_LENGTH, + initial_timestamp = UNINITIALIZED_DATETIME, + interval = UNINITIALIZED_PERIOD, + count = UNINITIALIZED_LENGTH, ) return ForecastParameters(horizon, initial_timestamp, interval, count) end @@ -87,22 +87,22 @@ mutable struct TimeSeriesParameters <: InfrastructureSystemsType end function TimeSeriesParameters(; - resolution=UNINITIALIZED_PERIOD, - forecast_params=ForecastParameters(), + resolution = UNINITIALIZED_PERIOD, + forecast_params = ForecastParameters(), ) return TimeSeriesParameters(resolution, forecast_params) end function TimeSeriesParameters(ts::StaticTimeSeries) - return TimeSeriesParameters(resolution=get_resolution(ts)) + return TimeSeriesParameters(; resolution = get_resolution(ts)) end function TimeSeriesParameters(ts::Forecast) - forecast_params = ForecastParameters( - count=get_count(ts), - 
horizon=get_horizon(ts), - initial_timestamp=get_initial_timestamp(ts), - interval=get_interval(ts), + forecast_params = ForecastParameters(; + count = get_count(ts), + horizon = get_horizon(ts), + initial_timestamp = get_initial_timestamp(ts), + interval = get_interval(ts), ) return TimeSeriesParameters(get_resolution(ts), forecast_params) end @@ -131,11 +131,11 @@ function TimeSeriesParameters( Dates.Millisecond(last_initial_time - initial_timestamp) / Dates.Millisecond(interval) + 1 end - fparams = ForecastParameters( - horizon=horizon, - initial_timestamp=initial_timestamp, - interval=interval, - count=count, + fparams = ForecastParameters(; + horizon = horizon, + initial_timestamp = initial_timestamp, + interval = interval, + count = count, ) return TimeSeriesParameters(resolution, fparams) end diff --git a/src/time_series_parser.jl b/src/time_series_parser.jl index 9962559fd..ccfa5f31d 100644 --- a/src/time_series_parser.jl +++ b/src/time_series_parser.jl @@ -40,7 +40,7 @@ mutable struct TimeSeriesFileMetadata end function TimeSeriesFileMetadata(; - simulation="", + simulation = "", category, component_name, name, @@ -50,8 +50,8 @@ function TimeSeriesFileMetadata(; percentiles, time_series_type_module, time_series_type, - scaling_factor_multiplier=nothing, - scaling_factor_multiplier_module=nothing, + scaling_factor_multiplier = nothing, + scaling_factor_multiplier_module = nothing, ) return TimeSeriesFileMetadata( simulation, @@ -91,23 +91,23 @@ function read_time_series_file_metadata(file_path::AbstractString) push!( metadata, TimeSeriesFileMetadata(; - simulation=simulation, - category=item["category"], - component_name=item["component_name"], - name=item["name"], - normalization_factor=normalization_factor, - data_file=item["data_file"], - resolution=parsed_resolution, + simulation = simulation, + category = item["category"], + component_name = item["component_name"], + name = item["name"], + normalization_factor = normalization_factor, + data_file = 
item["data_file"], + resolution = parsed_resolution, # Use default values until CDM data is updated. - percentiles=get(item, "percentiles", []), - time_series_type_module=get( + percentiles = get(item, "percentiles", []), + time_series_type_module = get( item, "module", "InfrastructureSystems", ), - time_series_type=get(item, "type", "SingleTimeSeries"), - scaling_factor_multiplier=scaling_factor_multiplier, - scaling_factor_multiplier_module=scaling_factor_multiplier_module, + time_series_type = get(item, "type", "SingleTimeSeries"), + scaling_factor_multiplier = scaling_factor_multiplier, + scaling_factor_multiplier_module = scaling_factor_multiplier_module, ), ) end @@ -125,18 +125,18 @@ function read_time_series_file_metadata(file_path::AbstractString) push!( metadata, TimeSeriesFileMetadata(; - simulation=simulation, - category=row.category, - component_name=row.component_name, - name=row.name, - resolution=Dates.Millisecond(Dates.Second(row.resolution)), - normalization_factor=row.normalization_factor, - data_file=row.data_file, - percentiles=[], - time_series_type_module=get(row, :module, "InfrastructureSystems"), - time_series_type=get(row, :type, "SingleTimeSeries"), - scaling_factor_multiplier=scaling_factor_multiplier, - scaling_factor_multiplier_module=scaling_factor_multiplier_module, + simulation = simulation, + category = row.category, + component_name = row.component_name, + name = row.name, + resolution = Dates.Millisecond(Dates.Second(row.resolution)), + normalization_factor = row.normalization_factor, + data_file = row.data_file, + percentiles = [], + time_series_type_module = get(row, :module, "InfrastructureSystems"), + time_series_type = get(row, :type, "SingleTimeSeries"), + scaling_factor_multiplier = scaling_factor_multiplier, + scaling_factor_multiplier_module = scaling_factor_multiplier_module, ), ) end @@ -210,7 +210,7 @@ struct TimeSeriesParsedInfo percentiles, file_path, resolution, - scaling_factor_multiplier=nothing, + 
scaling_factor_multiplier = nothing, ) return new( simulation, @@ -283,12 +283,12 @@ end function make_time_array(raw::RawTimeSeries, component_name, resolution) series_length = raw.length ini_time = raw.initial_time - timestamps = range(ini_time; length=series_length, step=resolution) + timestamps = range(ini_time; length = series_length, step = resolution) return TimeSeries.TimeArray(timestamps, raw.data[component_name]) end struct TimeSeriesParsingCache - time_series_infos::Vector{TimeSeriesParsedInfo} + time_series_attributes::Vector{TimeSeriesParsedInfo} data_files::Dict{String, RawTimeSeries} end diff --git a/src/time_series_storage.jl b/src/time_series_storage.jl index 4fdb1ed81..c00b3a48c 100644 --- a/src/time_series_storage.jl +++ b/src/time_series_storage.jl @@ -38,26 +38,27 @@ struct CompressionSettings end function CompressionSettings(; - enabled=DEFAULT_COMPRESSION, - type=CompressionTypes.DEFLATE, - level=3, - shuffle=true, + enabled = DEFAULT_COMPRESSION, + type = CompressionTypes.DEFLATE, + level = 3, + shuffle = true, ) return CompressionSettings(enabled, type, level, shuffle) end function make_time_series_storage(; - in_memory=false, - filename=nothing, - directory=nothing, - compression=CompressionSettings(), + in_memory = false, + filename = nothing, + directory = nothing, + compression = CompressionSettings(), ) if in_memory storage = InMemoryTimeSeriesStorage() elseif !isnothing(filename) - storage = Hdf5TimeSeriesStorage(; filename=filename, compression=compression) + storage = Hdf5TimeSeriesStorage(; filename = filename, compression = compression) else - storage = Hdf5TimeSeriesStorage(true; directory=directory, compression=compression) + storage = + Hdf5TimeSeriesStorage(true; directory = directory, compression = compression) end return storage diff --git a/src/utils/flatten_iterator_wrapper.jl b/src/utils/flatten_iterator_wrapper.jl index efa39c4e4..9028bb229 100644 --- a/src/utils/flatten_iterator_wrapper.jl +++ 
b/src/utils/flatten_iterator_wrapper.jl @@ -14,7 +14,7 @@ end Base.@propagate_inbounds function Base.iterate( iter::FlattenIteratorWrapper{T, I}, - state=(), + state = (), ) where {T, I} Base.iterate(iter.iter, state) end diff --git a/src/utils/generate_struct_files.jl b/src/utils/generate_struct_files.jl index 9eadea661..64f1b625d 100644 --- a/src/utils/generate_struct_files.jl +++ b/src/utils/generate_struct_files.jl @@ -45,14 +45,14 @@ Construct a StructField for code auto-generation purposes. function StructField(; name, data_type, - default=nothing, - comment="", - needs_conversion=false, - exclude_setter=false, - valid_range=nothing, - validation_action=nothing, - null_value=nothing, - internal_default=nothing, + default = nothing, + comment = "", + needs_conversion = false, + exclude_setter = false, + valid_range = nothing, + validation_action = nothing, + null_value = nothing, + internal_default = nothing, ) if !isnothing(valid_range) && valid_range isa Dict diff = setdiff(keys(valid_range), ("min", "max")) @@ -111,9 +111,9 @@ Construct a StructDefinition for code auto-generation purposes. function StructDefinition(; struct_name, fields, - supertype=nothing, - docstring="", - is_component=true, + supertype = nothing, + docstring = "", + is_component = true, ) if supertype isa DataType supertype = string(DataType) @@ -123,12 +123,12 @@ function StructDefinition(; if !any(x -> endswith(x.data_type, "InfrastructureSystemsInternal"), fields) push!( fields, - StructField( - name="internal", - data_type="InfrastructureSystemsInternal", - comment="Internal reference, do not modify.", - internal_default="InfrastructureSystemsInternal()", - exclude_setter=true, + StructField(; + name = "internal", + data_type = "InfrastructureSystemsInternal", + comment = "Internal reference, do not modify.", + internal_default = "InfrastructureSystemsInternal()", + exclude_setter = true, ), ) @info "Added InfrastructureSystemsInternal to component struct $struct_name." 
@@ -166,13 +166,13 @@ Refer to `StructDefinition` and `StructField` for descriptions of the available """ function generate_struct_file( definition::StructDefinition; - filename=nothing, - output_directory=nothing, + filename = nothing, + output_directory = nothing, ) generate_struct_files( - [definition], - filename=filename, - output_directory=output_directory, + [definition]; + filename = filename, + output_directory = output_directory, ) end @@ -190,7 +190,7 @@ Refer to `StructDefinition` and `StructField` for descriptions of the available - `output_directory::AbstractString`: Generate the files in this directory. Defaults to `src/generated` """ -function generate_struct_files(definitions; filename=nothing, output_directory=nothing) +function generate_struct_files(definitions; filename = nothing, output_directory = nothing) if isnothing(filename) filename = joinpath( dirname(Base.find_package("InfrastructureSystems")), @@ -224,7 +224,7 @@ function generate_struct_files(definitions; filename=nothing, output_directory=n end open(filename, "w") do io - JSON3.pretty(io, data, JSON3.AlignmentContext(indent=2)) + JSON3.pretty(io, data, JSON3.AlignmentContext(; indent = 2)) end @info "Added $(length(definitions)) structs to $filename" diff --git a/src/utils/generate_structs.jl b/src/utils/generate_structs.jl index 32f108f0a..05d74cf6d 100644 --- a/src/utils/generate_structs.jl +++ b/src/utils/generate_structs.jl @@ -90,7 +90,7 @@ function read_json_data(filename::String) end end -function generate_structs(directory, data::Vector; print_results=true) +function generate_structs(directory, data::Vector; print_results = true) struct_names = Vector{String}() unique_accessor_functions = Set{String}() unique_setter_functions = Set{String}() @@ -232,7 +232,7 @@ end function generate_structs( input_file::AbstractString, output_directory::AbstractString; - print_results=true, + print_results = true, ) # Include each generated file. 
if !isdir(output_directory) @@ -240,7 +240,7 @@ function generate_structs( end data = read_json_data(input_file) - generate_structs(output_directory, data, print_results=print_results) + generate_structs(output_directory, data; print_results = print_results) return end @@ -251,11 +251,11 @@ from descriptor_file. function test_generated_structs(descriptor_file, existing_dir) output_dir = "tmp-test-generated-structs" if isdir(output_dir) - rm(output_dir; recursive=true) + rm(output_dir; recursive = true) end mkdir(output_dir) - generate_structs(descriptor_file, output_dir; print_results=false) + generate_structs(descriptor_file, output_dir; print_results = false) matched = true for (file1, file2) in zip(readdir(output_dir), readdir(existing_dir)) @@ -275,6 +275,6 @@ function test_generated_structs(descriptor_file, existing_dir) end end - rm(output_dir; recursive=true) + rm(output_dir; recursive = true) return matched end diff --git a/src/utils/logging.jl b/src/utils/logging.jl index d9513bb7c..0d8345598 100644 --- a/src/utils/logging.jl +++ b/src/utils/logging.jl @@ -61,11 +61,11 @@ Returns a summary of log event counts by level. function report_log_summary(tracker::LogEventTracker) text = "\nLog message summary:\n" # Order by criticality. - for level in sort!(collect(keys(tracker.events)), rev=true) + for level in sort!(collect(keys(tracker.events)); rev = true) num_events = length(tracker.events[level]) text *= "\n$num_events $level events:\n" for event in - sort!(collect(get_log_events(tracker, level)), by=x -> x.count, rev=true) + sort!(collect(get_log_events(tracker, level)); by = x -> x.count, rev = true) text *= " count=$(event.count) at $(event.file):$(event.line)\n" text *= " example message=\"$(event.message)\"\n" if event.suppressed > 0 @@ -144,8 +144,8 @@ function LoggingConfiguration(config_filename) return LoggingConfiguration(; Dict(Symbol(k) => v for (k, v) in config)...) 
end -function make_logging_config_file(filename="logging_config.toml"; force=false) - cp(SIIP_LOGGING_CONFIG_FILENAME, filename, force=force) +function make_logging_config_file(filename = "logging_config.toml"; force = false) + cp(SIIP_LOGGING_CONFIG_FILENAME, filename; force = force) println("Created $filename") return end @@ -160,7 +160,7 @@ LogEventTracker() LogEventTracker((Logging.Info, Logging.Warn, Logging.Error)) ``` """ -function LogEventTracker(levels=(Logging.Info, Logging.Warn, Logging.Error)) +function LogEventTracker(levels = (Logging.Info, Logging.Warn, Logging.Error)) return LogEventTracker(Dict(l => Dict{Symbol, LogEvent}() for l in levels)) end @@ -201,28 +201,28 @@ logger = configure_logging(filename="mylog.txt") ``` """ function configure_logging(; - console=true, - console_stream=stderr, - console_level=Logging.Error, - progress=true, - file=true, - filename="log.txt", - file_level=Logging.Info, - file_mode="w+", - tracker=LogEventTracker(), - set_global=true, + console = true, + console_stream = stderr, + console_level = Logging.Error, + progress = true, + file = true, + filename = "log.txt", + file_level = Logging.Info, + file_mode = "w+", + tracker = LogEventTracker(), + set_global = true, ) - config = LoggingConfiguration( - console=console, - console_stream=console_stream, - console_level=console_level, - progress=progress, - file=file, - filename=filename, - file_level=file_level, - file_mode=file_mode, - tracker=tracker, - set_global=set_global, + config = LoggingConfiguration(; + console = console, + console_stream = console_stream, + console_level = console_level, + progress = progress, + file = file, + filename = filename, + file_level = file_level, + file_mode = file_mode, + tracker = tracker, + set_global = set_global, ) return configure_logging(config) end @@ -288,7 +288,7 @@ function Logging.handle_message( id, file, line; - maxlog=nothing, + maxlog = nothing, kwargs..., ) return Logging.handle_message( @@ -300,7 +300,7 @@ 
function Logging.handle_message( id, file, line; - maxlog=maxlog, + maxlog = maxlog, kwargs..., ) end @@ -327,7 +327,12 @@ open_file_logger("log.txt", Logging.Info) do logger end ``` """ -function open_file_logger(func::Function, filename::String, level=Logging.Info, mode="w+") +function open_file_logger( + func::Function, + filename::String, + level = Logging.Info, + mode = "w+", +) stream = open(filename, mode) try logger = FileLogger(stream, level) @@ -511,8 +516,8 @@ function Logging.handle_message( id, file, line; - maxlog=nothing, - _suppression_period=nothing, + maxlog = nothing, + _suppression_period = nothing, kwargs..., ) suppressed, num_suppressed = @@ -539,7 +544,7 @@ function Logging.handle_message( id, file, line; - maxlog=maxlog, + maxlog = maxlog, kwargs..., ) end diff --git a/src/utils/print.jl b/src/utils/print.jl index bf8937462..8a78e9e00 100644 --- a/src/utils/print.jl +++ b/src/utils/print.jl @@ -3,13 +3,13 @@ const MAX_SHOW_COMPONENTS = 10 const MAX_SHOW_FORECASTS = 10 const MAX_SHOW_FORECAST_INITIAL_TIMES = 1 -function Base.summary(components::Components) - return "$(typeof(components)): $(get_num_components(components))" +function Base.summary(container::InfrastructureSystemsContainer) + return "$(typeof(container)): $(get_num_members(container))" end -function Base.show(io::IO, components::Components) +function Base.show(io::IO, container::InfrastructureSystemsContainer) i = 1 - for component in iterate_components(components) + for component in iterate_container(container) if i <= MAX_SHOW_COMPONENTS show(io, component) println(io) @@ -19,18 +19,18 @@ function Base.show(io::IO, components::Components) if i > MAX_SHOW_COMPONENTS num = i - MAX_SHOW_COMPONENTS - println(io, "\n***Omitted $num components***\n") + println(io, "\n***Omitted $num $(get_display_string(container))***\n") end end -function Base.show(io::IO, ::MIME"text/plain", components::Components) - num_components = get_num_components(components) - println(io, "Components") 
+function Base.show(io::IO, ::MIME"text/plain", container::InfrastructureSystemsContainer) + num_components = get_num_members(container) + println(io, "$(get_display_string(container))") println(io, "==========") println(io, "Num components: $num_components") if num_components > 0 println(io) - show_components_table(io, components, backend=Val(:auto)) + show_components_table(io, container; backend = Val(:auto)) end end @@ -39,7 +39,7 @@ function Base.show(io::IO, ::MIME"text/html", components::Components) println(io, "

Components

") println(io, "

Num components: $num_components

") if num_components > 0 - show_components_table(io, components, backend=Val(:html), standalone=false) + show_components_table(io, components; backend = Val(:html), standalone = false) end end @@ -71,14 +71,14 @@ end function Base.show(io::IO, ::MIME"text/plain", data::SystemData) show(io, MIME"text/plain"(), data.components) println(io, "\n") - show_time_series_data(io, data, backend=Val(:auto)) + show_time_series_data(io, data; backend = Val(:auto)) show(io, data.time_series_params) end function Base.show(io::IO, ::MIME"text/html", data::SystemData) show(io, MIME"text/html"(), data.components) println(io, "\n") - show_time_series_data(io, data, backend=Val(:html), standalone=false) + show_time_series_data(io, data; backend = Val(:html), standalone = false) show(io, data.time_series_params) end @@ -113,9 +113,9 @@ function show_time_series_data(io::IO, data::SystemData; kwargs...) PrettyTables.pretty_table( io, table; - header=header, - title="Time Series Summary", - alignment=:l, + header = header, + title = "Time Series Summary", + alignment = :l, kwargs..., ) return @@ -202,7 +202,7 @@ function show_components_table(io::IO, components::Components; kwargs...) data = Array{Any, 2}(undef, length(components.data), length(header)) type_names = [(strip_module_name(string(x)), x) for x in keys(components.data)] - sort!(type_names, by=x -> x[1]) + sort!(type_names; by = x -> x[1]) for (i, (type_name, type)) in enumerate(type_names) vals = components.data[type] has_sts = false @@ -224,7 +224,7 @@ function show_components_table(io::IO, components::Components; kwargs...) data[i, 4] = has_forecasts end - PrettyTables.pretty_table(io, data; header=header, alignment=:l, kwargs...) + PrettyTables.pretty_table(io, data; header = header, alignment = :l, kwargs...) 
return end @@ -232,7 +232,7 @@ function show_components( io::IO, components::Components, component_type::Type{<:InfrastructureSystemsComponent}, - additional_columns::Union{Dict, Vector}=[]; + additional_columns::Union{Dict, Vector} = []; kwargs..., ) if !isconcretetype(component_type) @@ -293,7 +293,14 @@ function show_components( end end - PrettyTables.pretty_table(io, data; header=header, title=title, alignment=:l, kwargs...) + PrettyTables.pretty_table( + io, + data; + header = header, + title = title, + alignment = :l, + kwargs..., + ) return end diff --git a/src/utils/recorder_events.jl b/src/utils/recorder_events.jl index a36ddba4f..8d828a733 100644 --- a/src/utils/recorder_events.jl +++ b/src/utils/recorder_events.jl @@ -71,7 +71,12 @@ Construct a Recorder. - `mode = "w"`: Only used when io is nothing. - `directory = "."`: Only used when io is nothing. """ -function Recorder(name::Symbol; io::Union{Nothing, IO}=nothing, mode="w", directory=".") +function Recorder( + name::Symbol; + io::Union{Nothing, IO} = nothing, + mode = "w", + directory = ".", +) if isnothing(io) filename = joinpath(directory, string(name) * ".log") io = open(filename, mode) @@ -102,19 +107,19 @@ handle. """ function register_recorder!( name::Symbol; - io::Union{Nothing, IO}=nothing, - mode="w", - directory=".", + io::Union{Nothing, IO} = nothing, + mode = "w", + directory = ".", ) unregister_recorder!(name) - g_recorders[name] = Recorder(name; io=io, mode=mode, directory=directory) + g_recorders[name] = Recorder(name; io = io, mode = mode, directory = directory) @debug "registered new Recorder" _group = LOG_GROUP_RECORDER name end """ Unregister the recorder with this name and stop recording events. 
""" -function unregister_recorder!(name::Symbol; close_io=true) +function unregister_recorder!(name::Symbol; close_io = true) if haskey(g_recorders, name) @debug "unregister Recorder" _group = LOG_GROUP_RECORDER name recorder = pop!(g_recorders, name) @@ -161,7 +166,7 @@ Return the events of type T in filename. function list_recorder_events( ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing, + filter_func::Union{Nothing, Function} = nothing, ) where {T <: AbstractRecorderEvent} events = Vector{T}() for line in eachline(filename) @@ -207,7 +212,7 @@ show_recorder_events(TestEvent, test_recorder.log, x -> x.val2 > 2) function show_recorder_events( ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing; + filter_func::Union{Nothing, Function} = nothing; kwargs..., ) where {T <: AbstractRecorderEvent} return show_recorder_events(stdout, T, filename, filter_func; kwargs...) @@ -217,7 +222,7 @@ function show_recorder_events( io::IO, ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing; + filter_func::Union{Nothing, Function} = nothing; kwargs..., ) where {T <: AbstractRecorderEvent} events = list_recorder_events(T, filename, filter_func) @@ -233,7 +238,7 @@ end function show_recorder_events( io::IO, events::Vector{T}; - exclude_columns=Set{String}(), + exclude_columns = Set{String}(), kwargs..., ) where {T <: AbstractRecorderEvent} if isempty(events) @@ -267,6 +272,6 @@ function show_recorder_events( end end - PrettyTables.pretty_table(io, data; header=header, kwargs...) + PrettyTables.pretty_table(io, data; header = header, kwargs...) 
return end diff --git a/src/utils/test.jl b/src/utils/test.jl index a63ff3d1a..d5570e336 100644 --- a/src/utils/test.jl +++ b/src/utils/test.jl @@ -3,6 +3,7 @@ mutable struct TestComponent <: InfrastructureSystemsComponent name::String val::Int time_series_container::TimeSeriesContainer + attributes_container::SupplementalAttributesContainer internal::InfrastructureSystemsInternal end @@ -10,11 +11,18 @@ mutable struct AdditionalTestComponent <: InfrastructureSystemsComponent name::String val::Int time_series_container::TimeSeriesContainer + attributes_container::SupplementalAttributesContainer internal::InfrastructureSystemsInternal end function TestComponent(name, val) - return TestComponent(name, val, TimeSeriesContainer(), InfrastructureSystemsInternal()) + return TestComponent( + name, + val, + TimeSeriesContainer(), + SupplementalAttributesContainer(), + InfrastructureSystemsInternal(), + ) end function AdditionalTestComponent(name, val) @@ -22,6 +30,7 @@ function AdditionalTestComponent(name, val) name, val, TimeSeriesContainer(), + SupplementalAttributesContainer(), InfrastructureSystemsInternal(), ) end @@ -29,6 +38,10 @@ end get_internal(component::TestComponent) = component.internal get_internal(component::AdditionalTestComponent) = component.internal get_val(component::TestComponent) = component.val +get_supplemental_attributes_container(component::TestComponent) = + component.attributes_container +get_supplemental_attributes_container(component::AdditionalTestComponent) = + component.attributes_container function get_time_series_container(component::TestComponent) return component.time_series_container @@ -44,6 +57,7 @@ function deserialize(::Type{TestComponent}, data::Dict) data["name"], data["val"], deserialize(TimeSeriesContainer, data["time_series_container"]), + SupplementalAttributesContainer(), deserialize(InfrastructureSystemsInternal, data["internal"]), ) end @@ -83,3 +97,28 @@ function runtests(args...) 
empty!(ARGS) end end + +struct TestSupplemental <: InfrastructureSystemsSupplementalAttribute + value::Float64 + component_uuids::Set{UUIDs.UUID} + internal::InfrastructureSystemsInternal + time_series_container::TimeSeriesContainer +end + +function TestSupplemental(; + value::Float64 = 0.0, + component_uuids::Set{UUIDs.UUID} = Set{UUIDs.UUID}(), +) + return TestSupplemental( + value, + component_uuids, + InfrastructureSystemsInternal(), + TimeSeriesContainer(), + ) +end + +get_value(attr::TestSupplemental) = attr.value +get_internal(attr::TestSupplemental) = attr.internal +get_uuid(attr::TestSupplemental) = get_uuid(get_internal(attr)) +get_component_uuids(attr::TestSupplemental) = attr.component_uuids +get_time_series_container(attr::TestSupplemental) = attr.time_series_container diff --git a/src/utils/utils.jl b/src/utils/utils.jl index 46c9ff188..9f59619f0 100644 --- a/src/utils/utils.jl +++ b/src/utils/utils.jl @@ -49,7 +49,7 @@ end """ Returns an array of all super types of T. """ -function supertypes(::Type{T}, types=[]) where {T} +function supertypes(::Type{T}, types = []) where {T} super = supertype(T) push!(types, super) if super == Any @@ -122,26 +122,27 @@ Recursively compares struct values. Prints all mismatched values to stdout. - `y::T`: Second value - `compare_uuids::Bool = false`: Compare any UUID in the object or composed objects. """ -function compare_values(x::T, y::T; compare_uuids=false) where {T} +function compare_values(x::T, y::T; compare_uuids = false) where {T} match = true fields = fieldnames(T) if isempty(fields) match = x == y else for field_name in fields - if T <: TimeSeriesContainer && field_name == :time_series_storage + if (T <: TimeSeriesContainer || T <: SupplementalAttributes) && + field_name == :time_series_storage # This gets validated at SystemData. Don't repeat for each component.
continue end val1 = getfield(x, field_name) val2 = getfield(y, field_name) if !isempty(fieldnames(typeof(val1))) - if !compare_values(val1, val2, compare_uuids=compare_uuids) + if !compare_values(val1, val2; compare_uuids = compare_uuids) @error "values do not match" T field_name val1 val2 match = false end elseif val1 isa AbstractArray - if !compare_values(val1, val2, compare_uuids=compare_uuids) + if !compare_values(val1, val2; compare_uuids = compare_uuids) @error "values do not match" T field_name val1 val2 match = false end @@ -157,15 +158,15 @@ function compare_values(x::T, y::T; compare_uuids=false) where {T} return match end -function compare_values(x::Vector{T}, y::Vector{T}; compare_uuids=false) where {T} +function compare_values(x::Vector{T}, y::Vector{T}; compare_uuids = false) where {T} if length(x) != length(y) @error "lengths do not match" T length(x) length(y) return false end match = true - for i in range(1, length=length(x)) - if !compare_values(x[i], y[i], compare_uuids=compare_uuids) + for i in range(1; length = length(x)) + if !compare_values(x[i], y[i]; compare_uuids = compare_uuids) @error "values do not match" typeof(x[i]) i x[i] y[i] match = false end @@ -174,7 +175,7 @@ function compare_values(x::Vector{T}, y::Vector{T}; compare_uuids=false) where { return match end -function compare_values(x::Dict, y::Dict; compare_uuids=false) +function compare_values(x::Dict, y::Dict; compare_uuids = false) keys_x = Set(keys(x)) keys_y = Set(keys(y)) if keys_x != keys_y @@ -184,7 +185,7 @@ function compare_values(x::Dict, y::Dict; compare_uuids=false) match = true for key in keys_x - if !compare_values(x[key], y[key], compare_uuids=compare_uuids) + if !compare_values(x[key], y[key]; compare_uuids = compare_uuids) @error "values do not match" typeof(x[key]) key x[key] y[key] match = false end @@ -193,8 +194,8 @@ function compare_values(x::Dict, y::Dict; compare_uuids=false) return match end -compare_values(::Type{T}, ::Type{T}; compare_uuids=false) where 
{T} = true -compare_values(::Type{T}, ::Type{U}; compare_uuids=false) where {T, U} = false +compare_values(::Type{T}, ::Type{T}; compare_uuids = false) where {T} = true +compare_values(::Type{T}, ::Type{U}; compare_uuids = false) where {T, U} = false # Copied from https://discourse.julialang.org/t/encapsulating-enum-access-via-dot-syntax/11785/10 """ @@ -328,7 +329,7 @@ function forward(sender::Tuple{Type, Symbol}, receiver::Type, exclusions::Vector return code end -macro forward(sender, receiver, exclusions=Symbol[]) +macro forward(sender, receiver, exclusions = Symbol[]) out = quote list = InfrastructureSystems.forward($sender, $receiver, $exclusions) for line in list @@ -379,8 +380,11 @@ end function get_module(module_name) # root_module cannot find InfrastructureSystems if it hasn't been installed by the # user (but has been installed as a dependency to another package). - return module_name == "InfrastructureSystems" ? InfrastructureSystems : - Base.root_module(Base.__toplevel__, Symbol(module_name)) + return if module_name == "InfrastructureSystems" + InfrastructureSystems + else + Base.root_module(Base.__toplevel__, Symbol(module_name)) + end end get_type_from_strings(module_name, type) = getfield(get_module(module_name), Symbol(type)) @@ -409,7 +413,7 @@ function get_initial_times( return [initial_timestamp] end - return range(initial_timestamp; length=count, step=interval) + return range(initial_timestamp; length = count, step = interval) end function get_total_period( diff --git a/src/validation.jl b/src/validation.jl index 6c02e2437..e3fd8c509 100644 --- a/src/validation.jl +++ b/src/validation.jl @@ -110,7 +110,7 @@ end function get_limits(valid_range::String, ist_struct::InfrastructureSystemsType) # Gets min and max values from activepowerlimits for activepower, etc. 
- function recur(d, a, i=1) + function recur(d, a, i = 1) if i <= length(a) d = getfield(d, Symbol(a[i])) recur(d, a, i + 1) @@ -123,7 +123,7 @@ function get_limits(valid_range::String, ist_struct::InfrastructureSystemsType) vr = recur(ist_struct, split(valid_range, ".")) if isnothing(vr) - limits = (min=nothing, max=nothing) + limits = (min = nothing, max = nothing) else limits = get_limits(vr, ist_struct) end @@ -134,7 +134,7 @@ end function get_limits(valid_range::Dict, unused::InfrastructureSystemsType) # Gets min and max value defined for a field, # e.g. "valid_range": {"min":-1.571, "max":1.571}. - return (min=valid_range["min"], max=valid_range["max"]) + return (min = valid_range["min"], max = valid_range["max"]) end function get_limits( @@ -143,7 +143,7 @@ function get_limits( ) # Gets min and max value defined for a field, # e.g. "valid_range": {"min":-1.571, "max":1.571}. - return (min=valid_range.min, max=valid_range.max) + return (min = valid_range.min, max = valid_range.max) end function validate_range(::String, valid_info::ValidationInfo, field_value) diff --git a/test/common.jl b/test/common.jl index 9b4130a66..cebe7e888 100644 --- a/test/common.jl +++ b/test/common.jl @@ -1,6 +1,6 @@ -function create_system_data(; with_time_series=false, time_series_in_memory=false) - data = IS.SystemData(; time_series_in_memory=time_series_in_memory) +function create_system_data(; with_time_series = false, time_series_in_memory = false) + data = IS.SystemData(; time_series_in_memory = time_series_in_memory) name = "Component1" component = IS.TestComponent(name, 5) @@ -20,8 +20,8 @@ function create_system_data(; with_time_series=false, time_series_in_memory=fals return data end -function create_system_data_shared_time_series(; time_series_in_memory=false) - data = IS.SystemData(; time_series_in_memory=time_series_in_memory) +function create_system_data_shared_time_series(; time_series_in_memory = false) + data = IS.SystemData(; time_series_in_memory = 
time_series_in_memory) name1 = "Component1" name2 = "Component2" @@ -30,7 +30,7 @@ function create_system_data_shared_time_series(; time_series_in_memory=false) IS.add_component!(data, component1) IS.add_component!(data, component2) - ts = IS.SingleTimeSeries(name="val", data=create_time_array()) + ts = IS.SingleTimeSeries(; name = "val", data = create_time_array()) IS.add_time_series!(data, component1, ts) IS.add_time_series!(data, component2, ts) diff --git a/test/runtests.jl b/test/runtests.jl index 2c8adfee1..c17ed87cc 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -82,10 +82,10 @@ function run_tests() if logging_config_filename !== nothing config = IS.LoggingConfiguration(logging_config_filename) else - config = IS.LoggingConfiguration( - filename=LOG_FILE, - file_level=Logging.Info, - console_level=Logging.Error, + config = IS.LoggingConfiguration(; + filename = LOG_FILE, + file_level = Logging.Info, + console_level = Logging.Error, ) end console_logger = TerminalLogger(config.console_stream, config.console_level) diff --git a/test/test_deprecations.jl b/test/test_deprecations.jl index 5f7c2ef08..aecece456 100644 --- a/test/test_deprecations.jl +++ b/test/test_deprecations.jl @@ -2,5 +2,5 @@ data = IS.SystemData() component = IS.TestComponent("component1", 5) - @test_deprecated IS.add_component!(data, component; deserialization_in_progress=true) + @test_deprecated IS.add_component!(data, component; deserialization_in_progress = true) end diff --git a/test/test_generate_structs.jl b/test/test_generate_structs.jl index e8fc992ce..5abd0a2dd 100644 --- a/test/test_generate_structs.jl +++ b/test/test_generate_structs.jl @@ -9,21 +9,21 @@ end output_directory = mktempdir() descriptor_file = joinpath(output_directory, "structs.json") cp(orig_descriptor_file, descriptor_file) - new_struct = IS.StructDefinition( - struct_name="MyComponent", - docstring="Custom component", - supertype="InfrastructureSystemsComponent", - fields=[ - IS.StructField(name="val1", 
data_type=Float64), - IS.StructField(name="val2", data_type=Int), - IS.StructField(name="val3", data_type=String), + new_struct = IS.StructDefinition(; + struct_name = "MyComponent", + docstring = "Custom component", + supertype = "InfrastructureSystemsComponent", + fields = [ + IS.StructField(; name = "val1", data_type = Float64), + IS.StructField(; name = "val2", data_type = Int), + IS.StructField(; name = "val3", data_type = String), ], ) redirect_stdout(devnull) do IS.generate_struct_file( - new_struct, - filename=descriptor_file, - output_directory=output_directory, + new_struct; + filename = descriptor_file, + output_directory = output_directory, ) end data = open(descriptor_file, "r") do io @@ -36,18 +36,24 @@ end @testset "Test StructField errors" begin @test_throws ErrorException IS.StructDefinition( - struct_name="MyStruct", - fields=[IS.StructField(name="val", data_type=Float64, valid_range="invalid_field")], + struct_name = "MyStruct", + fields = [ + IS.StructField(; + name = "val", + data_type = Float64, + valid_range = "invalid_field", + ), + ], ) @test_throws ErrorException IS.StructField( - name="val", - data_type=Float64, - valid_range=Dict("min" => 0, "invalid" => 100), + name = "val", + data_type = Float64, + valid_range = Dict("min" => 0, "invalid" => 100), ) @test_throws ErrorException IS.StructField( - name="val", - data_type=Float64, - valid_range=Dict("min" => 0, "max" => 100), - validation_action="invalid", + name = "val", + data_type = Float64, + valid_range = Dict("min" => 0, "max" => 100), + validation_action = "invalid", ) end diff --git a/test/test_lazy_dict_from_iterator.jl b/test/test_lazy_dict_from_iterator.jl index 7cd990a23..1f90347f3 100644 --- a/test/test_lazy_dict_from_iterator.jl +++ b/test/test_lazy_dict_from_iterator.jl @@ -16,7 +16,7 @@ end container = IS.LazyDictFromIterator(Int, TestItem, iter, get_field) # Run through twice because the items must persist in the dict. 
- for i in range(1, length=2) + for i in range(1; length = 2) for x in 1:10 @test get(container, x) isa TestItem end diff --git a/test/test_logging.jl b/test/test_logging.jl index 761dac30f..2813d7a88 100644 --- a/test/test_logging.jl +++ b/test/test_logging.jl @@ -19,7 +19,7 @@ TEST_MSG = "test log message" IS.LogEvent("file", 14, :id, TEST_MSG, Logging.Error), ) - for i in range(1, length=2) + for i in range(1; length = 2) for event in events increment_count!(tracker, event, false) end @@ -60,7 +60,7 @@ end ) with_logger(logger) do - for i in range(1, length=2) + for i in range(1; length = 2) @debug TEST_MSG @info TEST_MSG @warn TEST_MSG @@ -82,10 +82,10 @@ end # Verify logging to a file. logfile = "testlog.txt" logger = IS.configure_logging(; - file=true, - filename=logfile, - file_level=Logging.Info, - set_global=false, + file = true, + filename = logfile, + file_level = Logging.Info, + set_global = false, ) with_logger(logger) do @info TEST_MSG @@ -103,12 +103,12 @@ end # Verify logging with no file. logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=nothing, - file_level=Logging.Info, - set_global=false, + console = true, + file = false, + console_stream = devnull, + filename = nothing, + file_level = Logging.Info, + set_global = false, ) with_logger(logger) do @error TEST_MSG @@ -122,13 +122,13 @@ end # Verify disabling of tracker. 
logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=logfile, - file_level=Logging.Info, - set_global=false, - tracker=nothing, + console = true, + file = false, + console_stream = devnull, + filename = logfile, + file_level = Logging.Info, + set_global = false, + tracker = nothing, ) with_logger(logger) do @error TEST_MSG @@ -138,13 +138,13 @@ end # Verify setting of global logger orig_logger = global_logger() logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=logfile, - file_level=Logging.Info, - set_global=true, - tracker=nothing, + console = true, + file = false, + console_stream = devnull, + filename = logfile, + file_level = Logging.Info, + set_global = true, + tracker = nothing, ) @error TEST_MSG @test orig_logger != global_logger() @@ -196,7 +196,7 @@ end try redirect_stdout(devnull) do IS.make_logging_config_file(filename) - return IS.make_logging_config_file(filename, force=true) + return IS.make_logging_config_file(filename; force = true) end @test IS.LoggingConfiguration(filename) isa IS.LoggingConfiguration finally @@ -305,12 +305,12 @@ end @testset "Test progress logger" begin io = IOBuffer() - logger = IS.configure_logging( - console_stream=io, - console=false, - file=false, - progress=true, - set_global=false, + logger = IS.configure_logging(; + console_stream = io, + console = false, + file = false, + progress = true, + set_global = false, ) with_logger(logger) do @progress for i in 1:5 @@ -324,8 +324,8 @@ end @testset "Test bad input" begin @test_throws ErrorException IS.configure_logging( - console=false, - file=false, - progress=false, + console = false, + file = false, + progress = false, ) end diff --git a/test/test_printing.jl b/test/test_printing.jl index ed80df597..4fc53c8ef 100644 --- a/test/test_printing.jl +++ b/test/test_printing.jl @@ -1,5 +1,5 @@ @testset "Test printing of the system and components" begin - sys = 
create_system_data(with_time_series=true, time_series_in_memory=true) + sys = create_system_data(; with_time_series = true, time_series_in_memory = true) io = IOBuffer() show(io, "text/plain", sys) text = String(take!(io)) @@ -8,7 +8,7 @@ end @testset "Test show_component_tables" begin - sys = create_system_data(with_time_series=true, time_series_in_memory=true) + sys = create_system_data(; with_time_series = true, time_series_in_memory = true) io = IOBuffer() IS.show_components(io, sys.components, IS.TestComponent) @test occursin("TestComponent", String(take!(io))) diff --git a/test/test_recorder.jl b/test/test_recorder.jl index 4e12196cd..050f8e75b 100644 --- a/test/test_recorder.jl +++ b/test/test_recorder.jl @@ -99,8 +99,8 @@ end IS.show_recorder_events( buf2, InfrastructureSystems.TestEvent, - filename, - exclude_columns=Set("timestamp"), + filename; + exclude_columns = Set("timestamp"), ) text = String(take!(buf1)) @test !occursin("timestamp", text) diff --git a/test/test_serialization.jl b/test/test_serialization.jl index 200d572b1..d91a65e87 100644 --- a/test/test_serialization.jl +++ b/test/test_serialization.jl @@ -1,5 +1,5 @@ -function validate_serialization(sys::IS.SystemData; time_series_read_only=false) +function validate_serialization(sys::IS.SystemData; time_series_read_only = false) #path, io = mktemp() # For some reason files aren't getting deleted when written to /tmp. Using current dir. 
filename = "test_system_serialization.json" @@ -9,7 +9,7 @@ function validate_serialization(sys::IS.SystemData; time_series_read_only=false) if isfile(filename) rm(filename) end - IS.prepare_for_serialization!(sys, filename; force=true) + IS.prepare_for_serialization!(sys, filename; force = true) data = IS.serialize(sys) open(filename, "w") do io return JSON3.write(io, data) @@ -41,15 +41,19 @@ function validate_serialization(sys::IS.SystemData; time_series_read_only=false) try cd(dirname(path)) sys2 = - IS.deserialize(IS.SystemData, data; time_series_read_only=time_series_read_only) + IS.deserialize( + IS.SystemData, + data; + time_series_read_only = time_series_read_only, + ) # Deserialization of components should be directed by the parent of SystemData. # There isn't one in IS, so perform the deserialization in the test code. for component in data["components"] type = IS.get_type_from_serialization_data(component) comp = IS.deserialize(type, component) - IS.add_component!(sys2, comp, allow_existing_time_series=true) + IS.add_component!(sys2, comp; allow_existing_time_series = true) end - return sys2, IS.compare_values(sys, sys2, compare_uuids=true) + return sys2, IS.compare_values(sys, sys2; compare_uuids = true) finally cd(orig) end @@ -57,7 +61,7 @@ end @testset "Test JSON serialization of system data" begin for in_memory in (true, false) - sys = create_system_data_shared_time_series(; time_series_in_memory=in_memory) + sys = create_system_data_shared_time_series(; time_series_in_memory = in_memory) _, result = validate_serialization(sys) @test result end @@ -71,19 +75,19 @@ end end @testset "Test JSON serialization of with read-only time series" begin - sys = create_system_data_shared_time_series(; time_series_in_memory=false) - sys2, result = validate_serialization(sys; time_series_read_only=true) + sys = create_system_data_shared_time_series(; time_series_in_memory = false) + sys2, result = validate_serialization(sys; time_series_read_only = true) @test 
result end @testset "Test JSON serialization of with mutable time series" begin - sys = create_system_data_shared_time_series(; time_series_in_memory=false) - sys2, result = validate_serialization(sys; time_series_read_only=false) + sys = create_system_data_shared_time_series(; time_series_in_memory = false) + sys2, result = validate_serialization(sys; time_series_read_only = false) @test result end @testset "Test JSON serialization with no time series" begin - sys = create_system_data(with_time_series=false) + sys = create_system_data(; with_time_series = false) sys2, result = validate_serialization(sys) @test result end @@ -103,13 +107,13 @@ end @testset "Test pretty-print JSON IO" begin component = IS.TestComponent("Component1", 2) io = IOBuffer() - IS.to_json(io, component, pretty=false) + IS.to_json(io, component; pretty = false) text = String(take!(io)) @test !occursin(" ", text) IS.deserialize(IS.TestComponent, JSON3.read(text, Dict)) == component io = IOBuffer() - IS.to_json(io, component, pretty=true) + IS.to_json(io, component; pretty = true) text = String(take!(io)) @test occursin(" ", text) IS.deserialize(IS.TestComponent, JSON3.read(text, Dict)) == component diff --git a/test/test_supplemental_attributes.jl b/test/test_supplemental_attributes.jl new file mode 100644 index 000000000..6b8ee4608 --- /dev/null +++ b/test/test_supplemental_attributes.jl @@ -0,0 +1,103 @@ +@testset "Test add_supplemental_attribute" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + @test length(container.data) == 1 + @test length(container.data[IS.GeographicInfo]) == 1 + @test IS.get_num_supplemental_attributes(container) == 1 + @test_throws ArgumentError IS.add_supplemental_attribute!( + container, + component, + geo_supplemental_attribute, + ) + + container = 
IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + @test_throws ArgumentError IS._add_supplemental_attribute!( + container, + geo_supplemental_attribute, + ) +end + +@testset "Test clear_supplemental_attributes" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + @test IS.get_num_supplemental_attributes(container) == 1 + + IS.clear_supplemental_attributes!(component) + @test isempty(IS.get_component_uuids(geo_supplemental_attribute)) + IS.clear_supplemental_attributes!(container) + supplemental_attributes = IS.get_supplemental_attributes(IS.GeographicInfo, container) + @test length(supplemental_attributes) == 0 +end + +@testset "Test remove_supplemental_attribute" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + @test IS.get_num_supplemental_attributes(container) == 1 + + IS.detach_component!(geo_supplemental_attribute, component) + IS.detach_supplemental_attribute!(component, geo_supplemental_attribute) + @test isempty(IS.get_supplemental_attributes_container(component)) + @test isempty(IS.get_component_uuids(geo_supplemental_attribute)) +end + +@testset "Test iterate_SupplementalAttributes" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + + i = 0 + for component in IS.iterate_supplemental_attributes(container) + i += 1 + end + @test i == 1 +end + +@testset "Summarize 
SupplementalAttributes" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + summary(devnull, container) +end + +@testset "Test supplemental_attributes serialization" begin + container = IS.SupplementalAttributes(IS.InMemoryTimeSeriesStorage()) + geo_supplemental_attribute = IS.GeographicInfo() + component = IS.TestComponent("component1", 5) + IS.add_supplemental_attribute!(container, component, geo_supplemental_attribute) + data = IS.serialize(container) + @test data isa Vector + @test !isempty(data) + @test data[1] isa Dict +end + +@testset "Add time series to supplemental_attribute" begin + data = IS.SystemData() + initial_time = Dates.DateTime("2020-09-01") + resolution = Dates.Hour(1) + ta = TimeSeries.TimeArray(range(initial_time; length = 24, step = resolution), ones(24)) + ts = IS.SingleTimeSeries(; data = ta, name = "test") + + for i in 1:3 + name = "component_$(i)" + component = IS.TestComponent(name, 5) + IS.add_component!(data, component) + supp_attribute = IS.TestSupplemental() + IS.add_supplemental_attribute!(data, component, supp_attribute) + IS.add_time_series!(data, supp_attribute, ts) + end + + for attribute in IS.iterate_supplemental_attributes(data) + @test IS.get_time_series_container(attribute) !== nothing + ts_ = IS.get_time_series(IS.SingleTimeSeries, attribute, "test") + @test IS.get_initial_timestamp(ts_) == initial_time + end +end diff --git a/test/test_system_data.jl b/test/test_system_data.jl index be45e9683..7c9895bc7 100644 --- a/test/test_system_data.jl +++ b/test/test_system_data.jl @@ -62,8 +62,8 @@ end data = IS.SystemData() initial_time = Dates.DateTime("2020-09-01") resolution = Dates.Hour(1) - ta = TimeSeries.TimeArray(range(initial_time; length=24, step=resolution), ones(24)) - ts = IS.SingleTimeSeries(data=ta, 
name="test") + ta = TimeSeries.TimeArray(range(initial_time; length = 24, step = resolution), ones(24)) + ts = IS.SingleTimeSeries(; data = ta, name = "test") for i in 1:3 name = "component_$(i)" @@ -88,7 +88,8 @@ end @test IS.is_attached(component, data.masked_components) # This needs to return time series for masked components. - @test length(collect(IS.get_time_series_multiple(data, type=IS.SingleTimeSeries))) == 3 + @test length(collect(IS.get_time_series_multiple(data; type = IS.SingleTimeSeries))) == + 3 end @testset "Test compare_values" begin @@ -99,7 +100,7 @@ end @test_logs( (:error, r"not match"), match_mode = :any, - !IS.compare_values(component1, component2, compare_uuids=true) + !IS.compare_values(component1, component2; compare_uuids = true) ) ) component2.name = "b" @@ -107,7 +108,7 @@ end @test_logs( (:error, r"not match"), match_mode = :any, - !IS.compare_values(component1, component2, compare_uuids=false) + !IS.compare_values(component1, component2; compare_uuids = false) ) ) @@ -138,11 +139,11 @@ end end @testset "Test compression settings" begin - none = IS.CompressionSettings(enabled=false) + none = IS.CompressionSettings(; enabled = false) @test IS.get_compression_settings(IS.SystemData()) == none - @test IS.get_compression_settings(IS.SystemData(time_series_in_memory=true)) == none - settings = IS.CompressionSettings(enabled=true, type=IS.CompressionTypes.DEFLATE) - @test IS.get_compression_settings(IS.SystemData(compression=settings)) == settings + @test IS.get_compression_settings(IS.SystemData(; time_series_in_memory = true)) == none + settings = IS.CompressionSettings(; enabled = true, type = IS.CompressionTypes.DEFLATE) + @test IS.get_compression_settings(IS.SystemData(; compression = settings)) == settings end @testset "Test single time series consistency" begin @@ -150,8 +151,11 @@ end initial_time = Dates.DateTime("2020-09-01") resolution = Dates.Hour(1) len = 24 - ta = TimeSeries.TimeArray(range(initial_time; length=len, 
step=resolution), ones(len)) - ts = IS.SingleTimeSeries(data=ta, name="test") + ta = TimeSeries.TimeArray( + range(initial_time; length = len, step = resolution), + ones(len), + ) + ts = IS.SingleTimeSeries(; data = ta, name = "test") for i in 1:2 name = "component_$(i)" @@ -173,8 +177,8 @@ end for i in 1:2 it = initial_time + resolution * i - ta = TimeSeries.TimeArray(range(it; length=len, step=resolution), ones(len)) - ts = IS.SingleTimeSeries(data=ta, name="test") + ta = TimeSeries.TimeArray(range(it; length = len, step = resolution), ones(len)) + ts = IS.SingleTimeSeries(; data = ta, name = "test") name = "component_$(i)" component = IS.TestComponent(name, 5) IS.add_component!(data, component) @@ -193,10 +197,10 @@ end for i in 1:2 len += i ta = TimeSeries.TimeArray( - range(initial_time; length=len, step=resolution), + range(initial_time; length = len, step = resolution), ones(len), ) - ts = IS.SingleTimeSeries(data=ta, name="test") + ts = IS.SingleTimeSeries(; data = ta, name = "test") name = "component_$(i)" component = IS.TestComponent(name, 5) IS.add_component!(data, component) @@ -227,8 +231,8 @@ end data = IS.SystemData() initial_time = Dates.DateTime("2020-09-01") resolution = Dates.Hour(1) - ta = TimeSeries.TimeArray(range(initial_time; length=24, step=resolution), ones(24)) - ts = IS.SingleTimeSeries(data=ta, name="test") + ta = TimeSeries.TimeArray(range(initial_time; length = 24, step = resolution), ones(24)) + ts = IS.SingleTimeSeries(; data = ta, name = "test") for i in 1:5 name = "component_$(i)" @@ -247,3 +251,50 @@ end @test ts_counts[1]["type"] == "SingleTimeSeries" @test ts_counts[1]["count"] == 5 end + +@testset "Test component and attributes" begin + data = IS.SystemData() + initial_time = Dates.DateTime("2020-09-01") + resolution = Dates.Hour(1) + ta = TimeSeries.TimeArray(range(initial_time; length = 24, step = resolution), ones(24)) + ts = IS.SingleTimeSeries(; data = ta, name = "test") + + for i in 1:5 + name = "component_$(i)" + 
component = IS.TestComponent(name, 3) + IS.add_component!(data, component) + IS.add_time_series!(data, component, ts) + geo_info = IS.GeographicInfo() + IS.add_supplemental_attribute!(data, component, geo_info) + end + + for c in IS.get_components(IS.TestComponent, data) + @test IS.has_supplemental_attributes(IS.GeographicInfo, c) + end + + @test length(IS.get_supplemental_attributes(IS.GeographicInfo, data)) == 5 + + i = 0 + for component in IS.iterate_supplemental_attributes(data) + i += 1 + end + @test i == 5 + + attributes = IS.get_supplemental_attributes(IS.GeographicInfo, data) + io = IOBuffer() + show(io, "text/plain", attributes) + output = String(take!(io)) + expected = "GeographicInfo: $i" + @test occursin(expected, output) + + attribute_removed = collect(attributes)[1] + IS.remove_supplemental_attribute!(data, attribute_removed) + + attributes = IS.get_supplemental_attributes(IS.GeographicInfo, data) + @test length(attributes) == 4 + @test IS.get_uuid(attribute_removed) ∉ IS.get_uuid.(attributes) + + IS.remove_supplemental_attributes!(IS.GeographicInfo, data) + attributes = IS.get_supplemental_attributes(IS.GeographicInfo, data) + @test length(attributes) == 0 +end diff --git a/test/test_time_series.jl b/test/test_time_series.jl index 1f24111ec..a7654576f 100644 --- a/test/test_time_series.jl +++ b/test/test_time_series.jl @@ -12,9 +12,9 @@ horizon = 24 data = SortedDict(initial_time => ones(horizon), other_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) - var1 = IS.get_time_series(IS.Deterministic, component, name; start_time=initial_time) + var1 = IS.get_time_series(IS.Deterministic, component, name; start_time = initial_time) @test length(var1) == 2 @test IS.get_horizon(var1) == horizon @test IS.get_initial_timestamp(var1) == initial_time @@ -23,27 +23,27 @@ IS.Deterministic, 
component, name; - start_time=initial_time, - count=2, + start_time = initial_time, + count = 2, ) @test length(var2) == 2 - var3 = IS.get_time_series(IS.Deterministic, component, name; start_time=other_time) + var3 = IS.get_time_series(IS.Deterministic, component, name; start_time = other_time) @test length(var2) == 2 # Throws errors @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - start_time=initial_time, - count=3, + start_time = initial_time, + count = 3, ) @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - start_time=other_time, - count=2, + start_time = other_time, + count = 2, ) count = IS.get_count(var2) @@ -90,11 +90,11 @@ end data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon), ), ) @@ -109,11 +109,11 @@ end data_ts_two_cols = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 2), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 2), ), ) @@ -150,21 +150,21 @@ end data_ts_polynomial = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), polynomial_cost, ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), polynomial_cost, ), ) data_ts_pwl = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; 
length = horizon, step = resolution), pwl_cost, ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), pwl_cost, ), ) @@ -198,16 +198,16 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Probabilistic, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 99), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 99), ), ) @@ -237,16 +237,16 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Scenarios, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Scenarios, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 2), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 2), ), ) @@ -269,30 +269,33 @@ end initial_time = Dates.DateTime("2020-09-01") resolution = Dates.Hour(1) - data = TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), ones(365)) - data = IS.SingleTimeSeries(data=data, name="test_c") + data = TimeSeries.TimeArray( + range(initial_time; length = 365, step = 
resolution), + ones(365), + ) + data = IS.SingleTimeSeries(; data = data, name = "test_c") IS.add_time_series!(sys, component, data) ts1 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 #Throws errors @@ -300,29 +303,29 @@ end IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=1200, + start_time = initial_time, + len = 1200, ) @test_throws ArgumentError IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time - Dates.Day(10), - len=12, + start_time = initial_time - Dates.Day(10), + len = 12, ) # Conflicting resolution data = TimeSeries.TimeArray( - range(initial_time; length=365, step=Dates.Minute(5)), + range(initial_time; length = 365, step = Dates.Minute(5)), ones(365), ) - data = IS.SingleTimeSeries(data=data, name="test_d") + data = IS.SingleTimeSeries(; data = data, name = "test_d") @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, data) end @testset "Test Deterministic with a wrapped SingleTimeSeries" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -342,7 +345,8 @@ end for i in 1:forecast_count fdata[dates[i]] = ones(horizon) end - bystander = IS.Deterministic(data=fdata, name="bystander", resolution=resolution) + bystander = + IS.Deterministic(; data = fdata, name = 
"bystander", resolution = resolution) IS.add_time_series!(sys, component, bystander) # This interval is greater than the max possible. @@ -396,12 +400,15 @@ end # Verify that get_time_series_multiple works with these types. forecasts = collect(IS.get_time_series_multiple(sys)) @test length(forecasts) == 3 - forecasts = collect(IS.get_time_series_multiple(sys; type=IS.AbstractDeterministic)) + forecasts = + collect(IS.get_time_series_multiple(sys; type = IS.AbstractDeterministic)) @test length(forecasts) == 2 - forecasts = collect(IS.get_time_series_multiple(sys; type=IS.Deterministic)) + forecasts = collect(IS.get_time_series_multiple(sys; type = IS.Deterministic)) @test length(forecasts) == 1 forecasts = - collect(IS.get_time_series_multiple(sys; type=IS.DeterministicSingleTimeSeries)) + collect( + IS.get_time_series_multiple(sys; type = IS.DeterministicSingleTimeSeries), + ) @test length(forecasts) == 1 @test forecasts[1] isa IS.DeterministicSingleTimeSeries @@ -410,14 +417,14 @@ end IS.Deterministic, component, name; - start_time=dates[2], + start_time = dates[2], ) # Must pass a full horizon. @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - len=horizon - 1, + len = horizon - 1, ) # Already stored. 
@test IS.transform_single_time_series!( @@ -470,7 +477,7 @@ end @testset "Test Deterministic with a wrapped SingleTimeSeries different offsets" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -500,7 +507,7 @@ end end @testset "Test SingleTimeSeries transform with multiple forecasts per component" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -533,7 +540,7 @@ end @testset "Test SingleTimeSeries transform deletions" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -568,14 +575,14 @@ end end @testset "Test DeterministicSingleTimeSeries with single window" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) resolution = Dates.Hour(1) horizon = 24 dates = collect( - range(Dates.DateTime("2020-01-01T00:00:00"); length=horizon, step=resolution), + range(Dates.DateTime("2020-01-01T00:00:00"); length = horizon, step = resolution), ) data = collect(1:horizon) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) @@ -598,14 +605,14 @@ end end @testset "Test DeterministicSingleTimeSeries with interval = resolution" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) resolution = Dates.Hour(1) horizon = 24 dates = collect( - range(Dates.DateTime("2020-01-01T00:00:00"); length=horizon, 
step=resolution), + range(Dates.DateTime("2020-01-01T00:00:00"); length = horizon, step = resolution), ) data = collect(1:horizon) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) @@ -662,10 +669,10 @@ end other_time = initial_time + resolution polynomial_cost = repeat([(999.0, 1.0)], 365) data_polynomial = TimeSeries.TimeArray( - range(initial_time; length=365, step=resolution), + range(initial_time; length = 365, step = resolution), polynomial_cost, ) - data = IS.SingleTimeSeries(data=data_polynomial, name="test_c") + data = IS.SingleTimeSeries(; data = data_polynomial, name = "test_c") IS.add_time_series!(sys, component, data) ts = IS.get_time_series(IS.SingleTimeSeries, component, "test_c";) @test IS.get_data_type(ts) == "POLYNOMIAL" @@ -675,23 +682,23 @@ end IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 end @@ -707,8 +714,8 @@ end other_time = initial_time + resolution pwl_cost = repeat([repeat([(999.0, 1.0)], 5)], 365) data_pwl = - TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), pwl_cost) - data = IS.SingleTimeSeries(data=data_pwl, name="test_c") + TimeSeries.TimeArray(range(initial_time; length = 365, step = resolution), pwl_cost) + data = IS.SingleTimeSeries(; data = data_pwl, name = "test_c") IS.add_time_series!(sys, component, data) ts = IS.get_time_series(IS.SingleTimeSeries, component, "test_c";) @test IS.get_data_type(ts) == "PWL" @@ -717,23 +724,23 @@ end IS.SingleTimeSeries, component, "test_c"; - 
start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 end @@ -769,7 +776,7 @@ end typeof(time_series), component, IS.get_name(time_series); - start_time=IS.get_initial_timestamp(time_series), + start_time = IS.get_initial_timestamp(time_series), ) @test length(time_series) == length(time_series2) @test IS.get_initial_timestamp(time_series) == IS.get_initial_timestamp(time_series2) @@ -801,7 +808,7 @@ end @test IS.has_time_series(component) ini_time = IS.get_initial_timestamp(data) retrieved_data = - IS.get_time_series(IS.Deterministic, component, "test"; start_time=ini_time) + IS.get_time_series(IS.Deterministic, component, "test"; start_time = ini_time) @test IS.get_name(data) == IS.get_name(retrieved_data) @test IS.get_resolution(data) == IS.get_resolution(retrieved_data) end @@ -817,9 +824,13 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name=name, data=ta, scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(; + name = name, + data = ta, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, ts) - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[1]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[1]) @test ts isa IS.SingleTimeSeries name = "Component2" @@ -847,7 +858,11 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, ["1"]) name = "val" - ts = IS.SingleTimeSeries(name=name, data=ta, 
scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(; + name = name, + data = ta, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, components, ts) hash_ta_main = nothing @@ -883,9 +898,17 @@ end ta1 = TimeSeries.TimeArray(dates1, data1, [IS.get_name(component)]) ta2 = TimeSeries.TimeArray(dates2, data2, [IS.get_name(component)]) time_series1 = - IS.SingleTimeSeries(name="val", data=ta1, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val", + data = ta1, + scaling_factor_multiplier = IS.get_val, + ) time_series2 = - IS.SingleTimeSeries(name="val2", data=ta2, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val2", + data = ta2, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, time_series1) IS.add_time_series!(sys, component, time_series2) @@ -893,17 +916,17 @@ end @test length(collect(IS.get_time_series_multiple(component))) == 2 @test length(collect(IS.get_time_series_multiple(sys))) == 2 - @test length(collect(IS.get_time_series_multiple(sys; type=IS.SingleTimeSeries))) == 2 - @test length(collect(IS.get_time_series_multiple(sys; type=IS.Probabilistic))) == 0 + @test length(collect(IS.get_time_series_multiple(sys; type = IS.SingleTimeSeries))) == 2 + @test length(collect(IS.get_time_series_multiple(sys; type = IS.Probabilistic))) == 0 time_series = collect(IS.get_time_series_multiple(sys)) @test length(time_series) == 2 - @test length(collect(IS.get_time_series_multiple(sys; name="val"))) == 1 - @test length(collect(IS.get_time_series_multiple(sys; name="bad_name"))) == 0 + @test length(collect(IS.get_time_series_multiple(sys; name = "val"))) == 1 + @test length(collect(IS.get_time_series_multiple(sys; name = "bad_name"))) == 0 filter_func = x -> TimeSeries.values(IS.get_data(x))[12] == 12 - @test length(collect(IS.get_time_series_multiple(sys, filter_func; name="val2"))) == 0 + @test length(collect(IS.get_time_series_multiple(sys, filter_func; name = 
"val2"))) == 0 end @testset "Test get_time_series_with_metadata_multiple" begin @@ -922,9 +945,17 @@ end ta1 = TimeSeries.TimeArray(dates1, data1, [IS.get_name(component)]) ta2 = TimeSeries.TimeArray(dates2, data2, [IS.get_name(component)]) time_series1 = - IS.SingleTimeSeries(name="val", data=ta1, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val", + data = ta1, + scaling_factor_multiplier = IS.get_val, + ) time_series2 = - IS.SingleTimeSeries(name="val2", data=ta2, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val2", + data = ta2, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, time_series1) IS.add_time_series!(sys, component, time_series2) @@ -932,26 +963,33 @@ end @test length( collect( - IS.get_time_series_with_metadata_multiple(component; type=IS.SingleTimeSeries), + IS.get_time_series_with_metadata_multiple( + component; + type = IS.SingleTimeSeries, + ), ), ) == 2 @test length( collect( - IS.get_time_series_with_metadata_multiple(component; type=IS.Probabilistic), + IS.get_time_series_with_metadata_multiple(component; type = IS.Probabilistic), ), ) == 0 @test length( - collect(IS.get_time_series_with_metadata_multiple(component; name="val")), + collect(IS.get_time_series_with_metadata_multiple(component; name = "val")), ) == 1 @test length( - collect(IS.get_time_series_with_metadata_multiple(component; name="bad_name")), + collect(IS.get_time_series_with_metadata_multiple(component; name = "bad_name")), ) == 0 filter_func = x -> TimeSeries.values(IS.get_data(x))[12] == 12 @test length( collect( - IS.get_time_series_with_metadata_multiple(component, filter_func; name="val2"), + IS.get_time_series_with_metadata_multiple( + component, + filter_func; + name = "val2", + ), ), ) == 0 end @@ -967,14 +1005,14 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name, ta; 
scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, ts) time_series = IS.get_time_series(IS.SingleTimeSeries, component, name) @test time_series isa IS.SingleTimeSeries end @testset "Test remove_time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) components = collect(IS.iterate_components(data)) @test length(components) == 1 component = components[1] @@ -989,13 +1027,13 @@ end end @testset "Test clear_time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) IS.clear_time_series!(data) @test length(get_all_time_series(data)) == 0 end @testset "Test that remove_component removes time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) components = collect(IS.get_components(IS.InfrastructureSystemsComponent, data)) @test length(components) == 1 @@ -1027,8 +1065,8 @@ end ts = IS.SingleTimeSeries( name, ta; - normalization_factor=1.0, - scaling_factor_multiplier=IS.get_val, + normalization_factor = 1.0, + scaling_factor_multiplier = IS.get_val, ) IS.add_time_series!(sys, component, ts) time_series = IS.get_time_series(IS.SingleTimeSeries, component, name) @@ -1057,11 +1095,11 @@ end ts = IS.SingleTimeSeries(name, ta) IS.add_time_series!(sys, component, ts) - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[1]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[1]) @test TimeSeries.timestamp(IS.get_data(ts))[1] == dates[1] @test length(ts) == 24 - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[3]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[3]) @test TimeSeries.timestamp(IS.get_data(ts))[1] == dates[3] @test length(ts) == 22 @@ 
-1069,8 +1107,8 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=10, + start_time = dates[5], + len = 10, ) @test TimeSeries.timestamp(IS.get_data(time_series))[1] == dates[5] @test length(time_series) == 10 @@ -1119,7 +1157,7 @@ end IS.add_component!(sys, component2) name2 = "val2" name_mapping = Dict((IS.get_name(component), name1) => name2) - IS.copy_time_series!(component2, component; name_mapping=name_mapping) + IS.copy_time_series!(component2, component; name_mapping = name_mapping) time_series = IS.get_time_series(IS.SingleTimeSeries, component2, name2) @test time_series isa IS.SingleTimeSeries @test IS.get_initial_timestamp(time_series) == initial_time @@ -1153,7 +1191,7 @@ end IS.add_component!(sys, component2) name2b = "val2b" name_mapping = Dict((IS.get_name(component), name2a) => name2b) - IS.copy_time_series!(component2, component; name_mapping=name_mapping) + IS.copy_time_series!(component2, component; name_mapping = name_mapping) time_series = IS.get_time_series(IS.SingleTimeSeries, component2, name2b) @test time_series isa IS.SingleTimeSeries @test IS.get_initial_timestamp(time_series) == initial_time2 @@ -1162,7 +1200,7 @@ end end @testset "Test copy time_series with transformed time series" begin - sys = create_system_data(time_series_in_memory=true) + sys = create_system_data(; time_series_in_memory = true) components = collect(IS.get_components(IS.InfrastructureSystemsComponent, sys)) @test length(components) == 1 component = components[1] @@ -1216,12 +1254,12 @@ end end @testset "Summarize time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) summary(devnull, data.time_series_params) end @testset "Test time_series forwarding methods" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] # Iteration @@ -1242,7 +1280,7 @@ end end @testset "Test 
time_series head" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] fcast = IS.head(time_series) # head returns a length of 6 by default, but don't hard-code that. @@ -1253,7 +1291,7 @@ end end @testset "Test time_series tail" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] fcast = IS.tail(time_series) # tail returns a length of 6 by default, but don't hard-code that. @@ -1264,7 +1302,7 @@ end end @testset "Test time_series from" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] start_time = Dates.DateTime(Dates.today()) + Dates.Hour(3) fcast = IS.from(time_series, start_time) @@ -1273,7 +1311,7 @@ end end @testset "Test time_series from" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] for end_time in ( Dates.DateTime(Dates.today()) + Dates.Hour(15), @@ -1287,7 +1325,7 @@ end @testset "Test Scenarios time_series" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) sys = IS.SystemData() name = "Component1" name = "val" @@ -1300,11 +1338,11 @@ end scenario_count = 2 data_input = rand(horizon, scenario_count) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Scenarios( - name=name, - resolution=resolution, - scenario_count=scenario_count, - data=data, + time_series = IS.Scenarios(; + name = name, + resolution = resolution, + scenario_count = scenario_count, + data = data, ) fdata = IS.get_data(time_series) @test size(first(values(fdata)))[2] == 2 @@ -1323,7 +1361,7 @@ end @testset "Test Probabilistic time_series" begin for 
in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" name = "val" component = IS.TestComponent(name, 5) @@ -1335,11 +1373,11 @@ end percentiles = 1:99 data_input = rand(horizon, length(percentiles)) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Probabilistic( - name=name, - resolution=resolution, - percentiles=percentiles, - data=data, + time_series = IS.Probabilistic(; + name = name, + resolution = resolution, + percentiles = percentiles, + data = data, ) fdata = IS.get_data(time_series) @test size(first(values(fdata)))[2] == length(percentiles) @@ -1372,7 +1410,7 @@ end IS.add_component!(sys, component) dates = create_dates("2020-01-01T00:00:00", Dates.Hour(1), "2020-01-01T23:00:00") ta = TimeSeries.TimeArray(dates, collect(1:24), [IS.get_name(component)]) - time_series = IS.SingleTimeSeries(name="val", data=ta) + time_series = IS.SingleTimeSeries(; name = "val", data = ta) @test_throws ArgumentError IS.add_time_series!(sys, component, time_series) end @@ -1391,12 +1429,15 @@ end horizon = 24 data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) sts_data = - TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), ones(365)) - sts = IS.SingleTimeSeries(data=sts_data, name="test_sts") + TimeSeries.TimeArray( + range(initial_time; length = 365, step = resolution), + ones(365), + ) + sts = IS.SingleTimeSeries(; data = sts_data, name = "test_sts") IS.add_time_series!(sys, component, sts) @test IS.get_time_series_resolution(sys) == resolution @@ -1414,7 +1455,7 @@ end @testset "Test get_time_series options" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + 
sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1423,7 +1464,7 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data = SortedDict(it => ones(horizon) * i for (i, it) in enumerate(initial_times)) @@ -1446,8 +1487,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1461,9 +1502,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1477,14 +1518,14 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @testset "Test get_time_series options for Polynomial Cost" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1493,14 +1534,15 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data_polynomial = SortedDict{Dates.DateTime, Vector{IS.POLYNOMIAL}}( it => repeat([(999.0, 1.0 * i)], 24) for (i, it) in enumerate(initial_times) ) - forecast = IS.Deterministic(data=data_polynomial, name=name, resolution=resolution) + forecast 
= + IS.Deterministic(; data = data_polynomial, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) @test IS.get_forecast_window_count(sys) == length(data_polynomial) @@ -1518,8 +1560,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1536,8 +1578,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1552,9 +1594,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1568,7 +1610,7 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @@ -1576,7 +1618,7 @@ end @testset "Test get_time_series options for PWL Cost" begin #for in_memory in (true, false) for in_memory in [false] - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1585,7 +1627,7 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data_pwl = SortedDict{Dates.DateTime, Vector{IS.PWL}}( @@ -1593,7 +1635,7 @@ end (i, it) in enumerate(initial_times) ) - forecast = IS.Deterministic(data=data_pwl, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data_pwl, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) @test 
IS.get_forecast_window_count(sys) == length(data_pwl) @@ -1611,8 +1653,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1628,8 +1670,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1643,9 +1685,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1659,7 +1701,7 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @@ -1674,7 +1716,7 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, ts) # Get data from storage, defaults. @@ -1692,24 +1734,24 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps( IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) @test TimeSeries.values(ta2) == data[5:9] * IS.get_val(component) @test TimeSeries.values(ta2) == IS.get_time_series_values( IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) # Get data from storage, ignore_scaling_factors. 
@@ -1717,8 +1759,8 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - ignore_scaling_factors=true, + start_time = dates[5], + ignore_scaling_factors = true, ) @test TimeSeries.timestamp(ta2) == dates[5:end] @test TimeSeries.values(ta2) == data[5:end] @@ -1731,30 +1773,30 @@ end @test TimeSeries.values(ta2) == IS.get_time_series_values(component, ts) # Get data from cached instance, custom offsets - ta2 = IS.get_time_series_array(component, ts, dates[5], len=5) + ta2 = IS.get_time_series_array(component, ts, dates[5]; len = 5) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.timestamp(ta2) == - IS.get_time_series_timestamps(component, ts, dates[5], len=5) + IS.get_time_series_timestamps(component, ts, dates[5]; len = 5) @test TimeSeries.values(ta2) == data[5:9] * IS.get_val(component) @test TimeSeries.values(ta2) == - IS.get_time_series_values(component, ts, dates[5], len=5) + IS.get_time_series_values(component, ts, dates[5]; len = 5) # Get data from cached instance, custom offsets, ignore_scaling_factors. 
ta2 = IS.get_time_series_array( component, ts, - dates[5], - len=5, - ignore_scaling_factors=true, + dates[5]; + len = 5, + ignore_scaling_factors = true, ) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.values(ta2) == data[5:9] @test TimeSeries.values(ta2) == IS.get_time_series_values( component, ts, - dates[5], - len=5, - ignore_scaling_factors=true, + dates[5]; + len = 5, + ignore_scaling_factors = true, ) IS.clear_time_series!(sys) @@ -1779,26 +1821,27 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=2, step=interval)) + initial_times = collect(range(initial_timestamp; length = 2, step = interval)) name = "test" horizon = 24 data = SortedDict(it => ones(horizon) * i for (i, it) in enumerate(initial_times)) forecast = - IS.Deterministic(name, data, resolution; scaling_factor_multiplier=IS.get_val) + IS.Deterministic(name, data, resolution; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, forecast) start_time = initial_timestamp + interval # Verify all permutations with defaults. 
- ta2 = IS.get_time_series_array(IS.Deterministic, component, name; start_time=start_time) + ta2 = + IS.get_time_series_array(IS.Deterministic, component, name; start_time = start_time) @test ta2 isa TimeSeries.TimeArray @test TimeSeries.timestamp(ta2) == - collect(range(start_time, length=horizon, step=resolution)) + collect(range(start_time; length = horizon, step = resolution)) @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps( IS.Deterministic, component, name; - start_time=start_time, + start_time = start_time, ) @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps(component, forecast, start_time) @@ -1807,7 +1850,7 @@ end IS.Deterministic, component, name; - start_time=start_time, + start_time = start_time, ) @test TimeSeries.values(ta2) == IS.get_time_series_values(component, forecast, start_time) @@ -1820,22 +1863,22 @@ end IS.Deterministic, component, name; - start_time=start_time, - ignore_scaling_factors=true, + start_time = start_time, + ignore_scaling_factors = true, ), ) == data[start_time] IS.get_time_series_values( IS.Deterministic, component, name; - start_time=start_time, - ignore_scaling_factors=true, + start_time = start_time, + ignore_scaling_factors = true, ) == data[start_time] IS.get_time_series_values( component, forecast, - start_time, - ignore_scaling_factors=true, + start_time; + ignore_scaling_factors = true, ) == data[start_time] # Custom length @@ -1844,22 +1887,22 @@ end IS.Deterministic, component, name; - start_time=start_time, - len=10, + start_time = start_time, + len = 10, ) @test TimeSeries.timestamp(ta2)[1:10] == - IS.get_time_series_timestamps(component, forecast, start_time; len=10) + IS.get_time_series_timestamps(component, forecast, start_time; len = 10) @test TimeSeries.values(ta2)[1:10] == IS.get_time_series_values( IS.Deterministic, component, name; - start_time=start_time, - len=len, + start_time = start_time, + len = len, ) @test TimeSeries.values(ta2)[1:10] == - 
IS.get_time_series_values(component, forecast, start_time; len=10) + IS.get_time_series_values(component, forecast, start_time; len = 10) @test TimeSeries.values(ta2)[1:10] == TimeSeries.values( - IS.get_time_series_array(component, forecast, start_time, len=10), + IS.get_time_series_array(component, forecast, start_time; len = 10), ) end @@ -1881,13 +1924,13 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Probabilistic, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time t = IS.get_time_series_array( IS.Probabilistic, component, "test"; - start_time=initial_time, + start_time = initial_time, ) @test size(t) == (24, 99) @test TimeSeries.values(t) == data1 @@ -1896,12 +1939,13 @@ end IS.Probabilistic, component, "test"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test size(t) == (12, 99) @test TimeSeries.values(t) == data1[1:12, :] - t_other = IS.get_time_series(IS.Probabilistic, component, "test"; start_time=other_time) + t_other = + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = other_time) @test collect(keys(IS.get_data(t_other)))[1] == other_time end @@ -1923,9 +1967,9 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Scenarios, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Scenarios, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time - t = IS.get_time_series_array(IS.Scenarios, component, "test"; start_time=initial_time) + t = IS.get_time_series_array(IS.Scenarios, component, "test"; start_time = initial_time) @test size(t) == (24, 99) @test TimeSeries.values(t) == data1 @@ -1933,12 +1977,12 
@@ end IS.Scenarios, component, "test"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test size(t) == (12, 99) @test TimeSeries.values(t) == data1[1:12, :] - t_other = IS.get_time_series(IS.Scenarios, component, "test"; start_time=other_time) + t_other = IS.get_time_series(IS.Scenarios, component, "test"; start_time = other_time) @test collect(keys(IS.get_data(t_other)))[1] == other_time end @@ -1956,17 +2000,17 @@ end # Horizon must be greater than 1. bad_data = SortedDict(initial_time => ones(1), second_time => ones(1)) - forecast = IS.Deterministic(data=bad_data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = bad_data, name = name, resolution = resolution) @test_throws ArgumentError IS.add_time_series!(sys, component, forecast) # Arrays must have the same length. bad_data = SortedDict(initial_time => ones(2), second_time => ones(3)) - forecast = IS.Deterministic(data=bad_data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = bad_data, name = name, resolution = resolution) @test_throws DimensionMismatch IS.add_time_series!(sys, component, forecast) # Set baseline parameters for the rest of the tests. 
data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) # Conflicting initial time @@ -1974,7 +2018,7 @@ end name = "test2" data = SortedDict(initial_time2 => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting resolution @@ -1982,7 +2026,7 @@ end name = "test2" data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution2) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution2) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting horizon @@ -1990,7 +2034,7 @@ end horizon2 = 23 data = SortedDict(initial_time => ones(horizon2), second_time => ones(horizon2)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting count @@ -2002,7 +2046,7 @@ end third_time => ones(horizon), ) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) end @@ -2020,14 +2064,15 @@ end horizon = 24 data = SortedDict(initial_time => ones(horizon), other_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; 
data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) key = IS.TimeSeriesKey(forecast) @test key == IS.TimeSeriesKey(IS.DeterministicMetadata, name) - @test key == IS.TimeSeriesKey(; time_series_type=IS.DeterministicMetadata, name=name) + @test key == + IS.TimeSeriesKey(; time_series_type = IS.DeterministicMetadata, name = name) - var1 = IS.get_time_series(IS.Deterministic, component, name; start_time=initial_time) - var_key1 = IS.get_time_series_by_key(key, component; start_time=initial_time) + var1 = IS.get_time_series(IS.Deterministic, component, name; start_time = initial_time) + var_key1 = IS.get_time_series_by_key(key, component; start_time = initial_time) @test length(var1) == length(var_key1) @test IS.get_horizon(var1) == horizon @test IS.get_horizon(var1) == IS.get_horizon(var_key1) @@ -2038,10 +2083,11 @@ end IS.Deterministic, component, name; - start_time=initial_time, - count=2, + start_time = initial_time, + count = 2, ) - var_key2 = IS.get_time_series_by_key(key, component; start_time=initial_time, count=2) + var_key2 = + IS.get_time_series_by_key(key, component; start_time = initial_time, count = 2) @test length(var2) == 2 @test length(var2) == length(var_key2) @@ -2049,13 +2095,13 @@ end @test_throws ArgumentError IS.get_time_series_by_key( key, component; - start_time=initial_time, - count=3, + start_time = initial_time, + count = 3, ) end @testset "Test copy_to_new_file! 
on HDF5" begin - sys = IS.SystemData(time_series_in_memory=false) + sys = IS.SystemData(; time_series_in_memory = false) name = "Component1" name = "val" component = IS.TestComponent(name, 5) @@ -2066,7 +2112,7 @@ end resolution = Dates.Hour(1) data_input = rand(horizon) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Deterministic(name=name, resolution=resolution, data=data) + time_series = IS.Deterministic(; name = name, resolution = resolution, data = data) fdata = IS.get_data(time_series) @test initial_timestamp == first(keys((fdata))) @test data_input == first(values((fdata))) @@ -2096,8 +2142,8 @@ end end for compression_enabled in (true, false) - compression = IS.CompressionSettings(enabled=compression_enabled) - sys = IS.SystemData(time_series_in_memory=false, compression=compression) + compression = IS.CompressionSettings(; enabled = compression_enabled) + sys = IS.SystemData(; time_series_in_memory = false, compression = compression) @test sys.time_series_storage.compression.enabled == compression_enabled name = "Component1" name = "val" @@ -2110,7 +2156,8 @@ end data_input = rand(horizon) data = SortedDict(initial_timestamp => data_input) for i in 1:2 - time_series = IS.Deterministic(name="name_$i", resolution=resolution, data=data) + time_series = + IS.Deterministic(; name = "name_$i", resolution = resolution, data = data) IS.add_time_series!(sys, component, time_series) end old_file = IS.get_file_path(sys.time_series_storage) @@ -2146,7 +2193,7 @@ end @testset "Test assign_new_uuid! 
for component with time series" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -2156,8 +2203,11 @@ end name = "test" data = - TimeSeries.TimeArray(range(initial_time; length=24, step=resolution), ones(24)) - data = IS.SingleTimeSeries(data=data, name=name) + TimeSeries.TimeArray( + range(initial_time; length = 24, step = resolution), + ones(24), + ) + data = IS.SingleTimeSeries(; data = data, name = name) IS.add_time_series!(sys, component, data) @test IS.get_time_series(IS.SingleTimeSeries, component, name) isa IS.SingleTimeSeries diff --git a/test/test_time_series_cache.jl b/test/test_time_series_cache.jl index bdcecba37..a70010c39 100644 --- a/test/test_time_series_cache.jl +++ b/test/test_time_series_cache.jl @@ -35,7 +35,7 @@ @test IS.get_next_time(cache) === nothing # Iterate over all initial times with custom cache size. - cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes=1024) + cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes = 1024) @test length(cache) == cache.common.num_iterations == 168 for (i, ta) in enumerate(cache) it = initial_times[i] @@ -57,7 +57,7 @@ IS.Deterministic, component, "test"; - start_time=Dates.DateTime("2020-01-02T00:00:00"), + start_time = Dates.DateTime("2020-01-02T00:00:00"), ) for (i, ta) in enumerate(cache) it = initial_times[i + 24] @@ -67,7 +67,7 @@ end # Test caching internals. 
- cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes=1024) + cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes = 1024) @test cache.in_memory_count == 5 @test IS.get_next_time(cache) == initial_timestamp for it in initial_times[1:(cache.in_memory_count)] @@ -99,10 +99,10 @@ end len = 96 data = TimeSeries.TimeArray( - range(initial_timestamp; length=len, step=resolution), + range(initial_timestamp; length = len, step = resolution), rand(len), ) - ts = IS.SingleTimeSeries(data=data, name="test") + ts = IS.SingleTimeSeries(; data = data, name = "test") IS.add_time_series!(sys, component, ts) cache = IS.StaticTimeSeriesCache(IS.SingleTimeSeries, component, "test") @@ -115,16 +115,16 @@ end for (i, ta) in enumerate(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it, len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it, len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end ta = IS.get_next_time_series_array!(cache) @test first(TimeSeries.timestamp(ta)) == initial_timestamp @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, initial_timestamp, len=1) + IS.get_time_series_timestamps(component, ts, initial_timestamp; len = 1) @test TimeSeries.values(ta) == - IS.get_time_series_values(component, ts, initial_timestamp, len=1) + IS.get_time_series_values(component, ts, initial_timestamp; len = 1) # Iterate over all initial times with custom cache size. 
cache_size_bytes = 96 @@ -132,7 +132,7 @@ end IS.SingleTimeSeries, component, "test"; - cache_size_bytes=cache_size_bytes, + cache_size_bytes = cache_size_bytes, ) @test cache.in_memory_rows == cache_size_bytes / 8 @test length(cache) == cache.common.num_iterations == len @@ -145,8 +145,8 @@ end for (i, ta) in enumerate(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end IS.reset!(cache) @@ -154,8 +154,8 @@ end ta = IS.get_next_time_series_array!(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end cache_size_bytes = 96 @@ -164,8 +164,8 @@ end IS.SingleTimeSeries, component, "test"; - start_time=start_time, - cache_size_bytes=cache_size_bytes, + start_time = start_time, + cache_size_bytes = cache_size_bytes, ) @test cache.in_memory_rows == cache_size_bytes / 8 @test cache.common.num_iterations == @@ -175,8 +175,8 @@ end ta = IS.get_next_time_series_array!(cache) it = start_time + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end end @@ -204,7 +204,7 @@ end forecast = IS.get_time_series(IS.AbstractDeterministic, component, name) 
initial_times = collect(IS.get_initial_times(forecast)) cache = - IS.ForecastCache(IS.AbstractDeterministic, component, name; cache_size_bytes=1024) + IS.ForecastCache(IS.AbstractDeterministic, component, name; cache_size_bytes = 1024) for (i, ta) in enumerate(cache) @test TimeSeries.timestamp(ta) == @@ -221,7 +221,7 @@ end name = "test" horizon = 24 data = SortedDict{Dates.DateTime, Matrix{Float64}}() - for (i, it) in enumerate(range(initial_time, step=interval, length=100)) + for (i, it) in enumerate(range(initial_time; step = interval, length = 100)) data[it] = ones(horizon, 99) * i end sys = IS.SystemData() @@ -233,7 +233,7 @@ end # Iterate over all initial times with custom cache size. sz = 1024 * 1024 - cache = IS.ForecastCache(IS.Probabilistic, component, "test"; cache_size_bytes=sz) + cache = IS.ForecastCache(IS.Probabilistic, component, "test"; cache_size_bytes = sz) initial_times = collect(keys(data)) @test cache.in_memory_count == trunc(Int, sz / (99 * 8 * 24)) for (i, ta) in enumerate(cache) diff --git a/test/test_time_series_storage.jl b/test/test_time_series_storage.jl index 6664f84b8..75d1c42fb 100644 --- a/test/test_time_series_storage.jl +++ b/test/test_time_series_storage.jl @@ -21,7 +21,7 @@ function test_add_remove(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -48,7 +48,7 @@ function test_add_references(storage::IS.TimeSeriesStorage) name = "val" component1 = IS.TestComponent("component1", 5) component2 = IS.TestComponent("component2", 6) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") ts_uuid = IS.get_uuid(ts) IS.serialize_time_series!(storage, 
IS.get_uuid(component1), name, ts) IS.add_time_series_reference!(storage, IS.get_uuid(component2), name, ts_uuid) @@ -77,7 +77,7 @@ function test_get_subset(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 1) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -97,7 +97,7 @@ function test_get_subset(storage::IS.TimeSeriesStorage) horizon = 24 data = SortedDict(initial_time1 => ones(horizon), initial_time2 => ones(horizon)) - ts = IS.Deterministic(data=data, name=name, resolution=resolution) + ts = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts_metadata = make_metadata(ts) rows = UnitRange(1, horizon) @@ -129,7 +129,7 @@ function test_clear(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -142,27 +142,27 @@ end @testset "Test time series storage implementations" begin for in_memory in (true, false) - test_add_remove(IS.make_time_series_storage(; in_memory=in_memory)) - test_get_subset(IS.make_time_series_storage(; in_memory=in_memory)) - test_clear(IS.make_time_series_storage(; in_memory=in_memory)) + test_add_remove(IS.make_time_series_storage(; in_memory = in_memory)) + test_get_subset(IS.make_time_series_storage(; in_memory = in_memory)) + test_clear(IS.make_time_series_storage(; in_memory = in_memory)) end - test_add_remove(IS.make_time_series_storage(; in_memory=false, directory=".")) - test_get_subset(IS.make_time_series_storage(; 
in_memory=false, directory=".")) - test_clear(IS.make_time_series_storage(; in_memory=false, directory=".")) + test_add_remove(IS.make_time_series_storage(; in_memory = false, directory = ".")) + test_get_subset(IS.make_time_series_storage(; in_memory = false, directory = ".")) + test_clear(IS.make_time_series_storage(; in_memory = false, directory = ".")) end @testset "Test copy time series references" begin for in_memory in (true, false) - test_add_remove(IS.make_time_series_storage(; in_memory=in_memory)) - test_add_references(IS.make_time_series_storage(; in_memory=in_memory)) - test_get_subset(IS.make_time_series_storage(; in_memory=in_memory)) - test_clear(IS.make_time_series_storage(; in_memory=in_memory)) + test_add_remove(IS.make_time_series_storage(; in_memory = in_memory)) + test_add_references(IS.make_time_series_storage(; in_memory = in_memory)) + test_get_subset(IS.make_time_series_storage(; in_memory = in_memory)) + test_clear(IS.make_time_series_storage(; in_memory = in_memory)) end end @testset "Test data format version" begin - storage = IS.make_time_series_storage(in_memory=false) + storage = IS.make_time_series_storage(; in_memory = false) @test IS.read_data_format_version(storage) == IS.TIME_SERIES_DATA_FORMAT_VERSION end @@ -171,18 +171,35 @@ end for type in (IS.CompressionTypes.BLOSC, IS.CompressionTypes.DEFLATE) for shuffle in (true, false) compression = - IS.CompressionSettings(enabled=true, type=type, level=5, shuffle=shuffle) + IS.CompressionSettings(; + enabled = true, + type = type, + level = 5, + shuffle = shuffle, + ) test_add_remove( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_add_references( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_get_subset( - 
IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_clear( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) end end @@ -190,12 +207,12 @@ end @testset "Test isempty" begin for in_memory in (true, false) - storage = IS.make_time_series_storage(in_memory=in_memory) + storage = IS.make_time_series_storage(; in_memory = in_memory) @test isempty(storage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) @test !isempty(storage) end