Skip to content

Commit

Permalink
implement ff correctly for services
Browse files Browse the repository at this point in the history
  • Loading branch information
jd-lara committed Sep 1, 2023
1 parent 77a6685 commit 3738c7b
Show file tree
Hide file tree
Showing 5 changed files with 210 additions and 25 deletions.
109 changes: 101 additions & 8 deletions src/feedforward/feedforward_arguments.jl
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,83 @@ function _add_feedforward_arguments!(
return
end

function _add_feedforward_slack_variables!(
    container::OptimizationContainer,
    ::T,
    ff::Union{LowerBoundFeedforward, UpperBoundFeedforward},
    model::ServiceModel{U, V},
    devices::Vector,
) where {
    T <: Union{LowerBoundFeedForwardSlack, UpperBoundFeedForwardSlack},
    U <: PSY.AbstractReserve,
    V <: AbstractReservesFormulation,
}
    # Register one non-negative slack variable per (device, time step) for every
    # variable affected by the feedforward, namespaced with the service name so
    # multiple services over the same device type do not collide.
    step_range = get_time_steps(container)
    jump_model = get_jump_model(container)
    service_name = get_service_name(model)
    expected_names = [PSY.get_name(dev) for dev in devices]
    for affected_key in get_affected_values(ff)
        affected_variable = get_variable(container, affected_key)
        axis_names, axis_steps = JuMP.axes(affected_variable)
        # The slack container must mirror the axes of the affected variable.
        IS.@assert_op axis_names == expected_names
        IS.@assert_op axis_steps == step_range
        entry_type = get_entry_type(affected_key)
        slack_container = add_variable_container!(
            container,
            T(),
            U,
            expected_names,
            step_range;
            meta = "$(entry_type)_$(service_name)",
        )

        for step in step_range, device_name in axis_names
            slack_container[device_name, step] = JuMP.@variable(
                jump_model,
                base_name = "$(T)_$(U)_{$(device_name), $(step)}",
                lower_bound = 0.0
            )
        end
    end
    return
end

function _add_feedforward_slack_variables!(
    container::OptimizationContainer,
    ::T,
    ff::Union{LowerBoundFeedforward, UpperBoundFeedforward},
    model::DeviceModel{U, V},
    devices::IS.FlattenIteratorWrapper{U},
) where {
    T <: Union{LowerBoundFeedForwardSlack, UpperBoundFeedForwardSlack},
    U <: PSY.Device,
    V <: AbstractDeviceFormulation,
}
    # Register one non-negative slack variable per (device, time step) for every
    # variable affected by the feedforward.
    time_steps = get_time_steps(container)
    device_names = [PSY.get_name(d) for d in devices]
    for var in get_affected_values(ff)
        variable = get_variable(container, var)
        set_name, set_time = JuMP.axes(variable)
        # The slack container must mirror the axes of the affected variable.
        IS.@assert_op set_name == device_names
        IS.@assert_op set_time == time_steps

        var_type = get_entry_type(var)
        # Fix: the container is indexed below as [name, t], so it must be built
        # with both the device-name and time-step axes (previously only names
        # were passed). Also pass an instance `T()` and use a distinct binding
        # so the affected variable isn't shadowed, matching the ServiceModel
        # method of this function.
        variable_container = add_variable_container!(
            container,
            T(),
            U,
            device_names,
            time_steps;
            meta = "$(var_type)",
        )

        for t in time_steps, name in set_name
            variable_container[name, t] = JuMP.@variable(
                get_jump_model(container),
                base_name = "$(T)_$(U)_{$(name), $(t)}",
                lower_bound = 0.0
            )
        end
    end
    return
end

function _add_feedforward_arguments!(
container::OptimizationContainer,
model::DeviceModel{T, U},
Expand All @@ -54,7 +131,12 @@ function _add_feedforward_arguments!(
parameter_type = get_default_parameter_type(ff, T)
add_parameters!(container, parameter_type, ff, model, devices)
if get_slacks(ff)
add_variables!(container, UpperBoundFeedForwardSlack(), devices, U())
add_feedforward_slack_variables!(
container,
UpperBoundFeedForwardSlack,
devices,
model,
)
end
return
end
Expand All @@ -68,7 +150,12 @@ function _add_feedforward_arguments!(
parameter_type = get_default_parameter_type(ff, SR)
add_parameters!(container, parameter_type, ff, model, contributing_devices)
if get_slacks(ff)
add_variables!(container, UpperBoundFeedForwardSlack(), contributing_devices, U())
add_feedforward_slack_variables!(
container,
UpperBoundFeedForwardSlack,
contributing_devices,
model,
)
end
return
end
Expand All @@ -82,21 +169,27 @@ function _add_feedforward_arguments!(
parameter_type = get_default_parameter_type(ff, T)
add_parameters!(container, parameter_type, ff, model, devices)
if get_slacks(ff)
_add_slack_variable!(container, LowerBoundFeedForwardSlack(), devices, U)
_add_feedforward_slack_variables!(
container,
LowerBoundFeedForwardSlack,
ff,
model,
devices,
)
end
return
end

function _add_feedforward_arguments!(
container::OptimizationContainer,
model::ServiceModel{SR},
contributing_devices::Vector{T},
model::ServiceModel{T, U},
contributing_devices::Vector{V},
ff::LowerBoundFeedforward,
) where {T <: PSY.Component, SR <: PSY.AbstractReserve}
parameter_type = get_default_parameter_type(ff, SR)
) where {T <: PSY.AbstractReserve, U <: AbstractReservesFormulation, V <: PSY.Component}
parameter_type = get_default_parameter_type(ff, T)
add_parameters!(container, parameter_type, ff, model, contributing_devices)
if get_slacks(ff)
_add_slack_variable!(
_add_feedforward_slack_variables!(
container,
LowerBoundFeedForwardSlack(),
ff,
Expand Down
55 changes: 41 additions & 14 deletions src/feedforward/feedforward_constraints.jl
Original file line number Diff line number Diff line change
Expand Up @@ -319,10 +319,10 @@ The Parameters are initialized using the uppper boundary values of the provided
"""
function add_feedforward_constraints!(
container::OptimizationContainer,
::DeviceModel,
::DeviceModel{T, U},
devices::IS.FlattenIteratorWrapper{T},
ff::LowerBoundFeedforward,
) where {T <: PSY.Component}
) where {T <: PSY.Component, U <: AbstractDeviceFormulation}
time_steps = get_time_steps(container)
parameter_type = get_default_parameter_type(ff, T)
param_ub = get_parameter_array(container, parameter_type(), T)
Expand All @@ -344,10 +344,20 @@ function add_feedforward_constraints!(
)

for t in time_steps, name in set_name
con_ub[name, t] = JuMP.@constraint(
container.JuMPmodel,
variable[name, t] >= param_ub[name, t] * multiplier_ub[name, t]
)
if use_slacks
slack_var =
get_variable(container, LowerBoundFeedForwardSlack(), T, "$(var_type)")
con_ub[name, t] = JuMP.@constraint(
get_jump_model(container),
variable[name, t] >=
param_ub[name, t] * multiplier_ub[name, t] + slack_var[name, t]
)
else
con_ub[name, t] = JuMP.@constraint(
get_jump_model(container),
variable[name, t] >= param_ub[name, t] * multiplier_ub[name, t]
)
end
end
end
return
Expand All @@ -358,37 +368,54 @@ function add_feedforward_constraints!(
model::ServiceModel{T, U},
contributing_devices::Vector{V},
ff::LowerBoundFeedforward,
) where {T, U, V <: PSY.Component}
) where {T <: PSY.Service, U <: AbstractServiceFormulation, V <: PSY.Component}
time_steps = get_time_steps(container)
parameter_type = get_default_parameter_type(ff, T)
param_ub = get_parameter_array(container, parameter_type(), T, get_service_name(model))
service_name = get_service_name(model)
multiplier_ub = get_parameter_multiplier_array(
container,
parameter_type(),
T,
get_service_name(model),
service_name,
)
use_slacks = get_slacks(ff)
for var in get_affected_values(ff)
variable = get_variable(container, var)
set_name, set_time = JuMP.axes(variable)
IS.@assert_op set_name == [PSY.get_name(d) for d in contributing_devices]
IS.@assert_op set_time == time_steps

var_type = get_entry_type(var)
con_ub = add_constraints_container!(
con_lb = add_constraints_container!(
container,
FeedforwardLowerBoundConstraint(),
T,
set_name,
time_steps;
meta = "$(var_type)lb",
meta = "$(var_type)_$(service_name)",
)

for t in time_steps, name in set_name
con_ub[name, t] = JuMP.@constraint(
container.JuMPmodel,
variable[name, t] >= param_ub[name, t] * multiplier_ub[name, t]
)
if use_slacks
slack_var = get_variable(
container,
LowerBoundFeedForwardSlack(),
T,
"$(var_type)_$(service_name)",
)
slack_var[name, t]
con_lb[name, t] = JuMP.@constraint(
get_jump_model(container),
variable[name, t] >=
param_ub[name, t] * multiplier_ub[name, t] + slack_var[name, t]
)
else
con_lb[name, t] = JuMP.@constraint(
get_jump_model(container),
variable[name, t] >= param_ub[name, t] * multiplier_ub[name, t]
)
end
end
end
return
Expand Down
25 changes: 25 additions & 0 deletions src/feedforward/feedforwards.jl
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,31 @@ get_default_parameter_type(::LowerBoundFeedforward, _) = LowerBoundValueParamete
get_optimization_container_key(ff::LowerBoundFeedforward) = ff.optimization_container_key
get_slacks(ff::LowerBoundFeedforward) = ff.add_slacks

function attach_feedforward!(
    model::ServiceModel,
    ff::T,
) where {T <: Union{LowerBoundFeedforward, UpperBoundFeedforward}}
    # Attach a bound feedforward to a service model. If the feedforward carries
    # no service meta, rebuild it with this model's service name so container
    # keys are namespaced per service.
    if get_feedforward_meta(ff) != NO_SERVICE_NAME_PROVIDED
        ff_ = ff
    else
        ff_ = T(;
            component_type = get_component_type(ff),
            source = get_entry_type(get_optimization_container_key(ff)),
            affected_values = [get_entry_type(get_optimization_container_key(ff))],
            meta = model.service_name,
            add_slacks = ff.add_slacks,
        )
    end
    if !isempty(model.feedforwards)
        ff_k = [get_optimization_container_key(v) for v in model.feedforwards if isa(v, T)]
        # Fix: the membership operator was missing, which does not parse.
        # Skip duplicates: the same container key must not be attached twice.
        if get_optimization_container_key(ff_) in ff_k
            return
        end
    end
    push!(model.feedforwards, ff_)
    return
end

"""
Adds a constraint to make the bounds of a variable 0.0. Effectively allows to "turn off" a value.
"""
Expand Down
44 changes: 42 additions & 2 deletions src/parameters/update_parameters.jl
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,47 @@ end
function _update_parameter_values!(
    parameter_array::AbstractArray{T},
    attributes::VariableValueAttributes,
    ::Type{<:PSY.Device},
    model::DecisionModel,
    state::DatasetContainer{InMemoryDataset},
) where {T <: Union{JuMP.VariableRef, Float64}}
    # Copy device variable values from the simulation state into the parameter
    # array, advancing through the state timestamps as the model's own
    # timestamps (current time + k * resolution) move forward.
    attribute_key = get_attribute_key(attributes)
    state_values = get_dataset_values(state, attribute_key)
    component_names, time = axes(parameter_array)
    current_time = get_current_time(model)
    resolution = get_resolution(model)

    state_data = get_dataset(state, attribute_key)
    state_timestamps = state_data.timestamps
    max_state_index = get_num_rows(state_data)

    state_data_index = find_timestamp_index(state_timestamps, current_time)
    sim_timestamps = range(current_time; step = resolution, length = time[end])
    for t in time
        # Only step forward to the next state row when the simulation clock
        # has reached its timestamp; never step past the last row.
        candidate_index = min(max_state_index, state_data_index + 1)
        @debug "parameter horizon is over the step" max_state_index > state_data_index + 1
        if state_timestamps[candidate_index] <= sim_timestamps[t]
            state_data_index = candidate_index
        end
        for name in component_names
            # Pass indices in this way since JuMP DenseAxisArray don't support view()
            state_value = state_values[name, state_data_index]
            if !isfinite(state_value)
                error(
                    "The value for the system state used in $(encode_key_as_string(get_attribute_key(attributes))) is not a finite value $(state_value) \
                    This is commonly caused by referencing a state value at a time when such decision hasn't been made. \
                    Consider reviewing your models' horizon and interval definitions",
                )
            end
            _set_param_value!(parameter_array, state_value, name, t)
        end
    end
    return
end

function _update_parameter_values!(
parameter_array::AbstractArray{T},
attributes::VariableValueAttributes,
::PSY.Reserve,
model::DecisionModel,
state::DatasetContainer{InMemoryDataset},
) where {T <: Union{JuMP.VariableRef, Float64}}
Expand Down Expand Up @@ -207,7 +247,7 @@ function _update_parameter_values!(
::Type{U},
model::DecisionModel,
state::DatasetContainer{InMemoryDataset},
) where {T <: Union{JuMP.VariableRef, Float64}, U <: PSY.Component}
) where {T <: Union{JuMP.VariableRef, Float64}, U <: PSY.Device}
current_time = get_current_time(model)
state_values = get_dataset_values(state, get_attribute_key(attributes))
component_names, time = axes(parameter_array)
Expand Down
2 changes: 1 addition & 1 deletion src/utils/jump_utils.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#Given the changes in syntax in ParameterJuMP and the new format to create anonymous parameters
function add_jump_parameter(jump_model::JuMP.Model, val::Number)
    # Emulate a parameter by creating an anonymous variable and fixing it to
    # `val`; `force = true` overrides any existing bounds on the variable.
    fixed_variable = JuMP.@variable(jump_model, base_name = "param")
    JuMP.fix(fixed_variable, val; force = true)
    return fixed_variable
end
end
Expand Down

0 comments on commit 3738c7b

Please sign in to comment.