Ring formatter.
james-choncholas committed Nov 1, 2024
1 parent dc350e3 commit c99465b
Showing 3 changed files with 15 additions and 5 deletions.
8 changes: 6 additions & 2 deletions tf_shell_ml/dpsgd_sequential_model.py

@@ -46,7 +46,9 @@ def call(self, x, training=False):
         return x
 
     def compute_max_two_norm_and_pred(self, features, skip_two_norm):
-        with tf.GradientTape(persistent=tf.executing_eagerly() or self.jacobian_pfor) as tape:
+        with tf.GradientTape(
+            persistent=tf.executing_eagerly() or self.jacobian_pfor
+        ) as tape:
             y_pred = self(features, training=True)  # forward pass
 
         if not skip_two_norm:
@@ -272,4 +274,6 @@ def rebalance(x, s):
         else:
             result[key] = value  # non-subdict elements are just copied
 
-        return result, None if self.disable_encryption else backprop_context.num_slots
+        return result, (
+            None if self.disable_encryption else backprop_context.num_slots
+        )
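
For context, a minimal standalone sketch (not from this repository; the tensors here are made up) of the pattern being rewrapped above: a non-persistent tf.GradientTape may only be queried once, so code that calls tape.jacobian or tape.gradient more than once must create the tape with persistent=True.

import tensorflow as tf

x = tf.constant([[1.0, 2.0]])
w = tf.Variable([[0.5], [0.25]])

with tf.GradientTape(persistent=True) as tape:
    y = tf.matmul(x, w)  # forward pass
    loss = tf.reduce_sum(y * y)

jac = tape.jacobian(y, w)      # first query of the tape
grad = tape.gradient(loss, w)  # second query; allowed only because persistent=True
del tape                       # release the tape's resources when done
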
4 changes: 3 additions & 1 deletion tf_shell_ml/model_base.py

@@ -55,7 +55,9 @@ def __init__(
         self.dataset_prepped = False
 
         if self.disable_encryption and self.jacobian_pfor:
-            print("WARNING: `jacobian_pfor` may be incompatible with `disable_encryption`.")
+            print(
+                "WARNING: `jacobian_pfor` may be incompatible with `disable_encryption`."
+            )
 
     def compile(self, shell_loss, **kwargs):
         if not isinstance(shell_loss, tf_shell_ml.CategoricalCrossentropy):
8 changes: 6 additions & 2 deletions tf_shell_ml/postscale_sequential_model.py

@@ -57,7 +57,9 @@ def shell_train_step(self, data):
         # is factored out of the gradient computation and accounted for below.
         self.layers[-1].activation = tf.keras.activations.linear
 
-        with tf.GradientTape(persistent=tf.executing_eagerly() or self.jacobian_pfor) as tape:
+        with tf.GradientTape(
+            persistent=tf.executing_eagerly() or self.jacobian_pfor
+        ) as tape:
             y_pred = self(x, training=True)  # forward pass
             grads = tape.jacobian(
                 y_pred,
@@ -273,4 +275,6 @@ def rebalance(x, s):
         else:
             result[key] = value  # non-subdict elements are just copied
 
-        return result, None if self.disable_encryption else backprop_context.num_slots
+        return result, (
+            None if self.disable_encryption else backprop_context.num_slots
+        )
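
The return rewrapping in both sequential models is behavior-preserving: parenthesizing a long conditional expression lets it span multiple lines, the style automatic formatters such as Black emit. A hypothetical standalone illustration (the argument names mirror, but are not, the repository's attributes):

def finish(result, disable_encryption, num_slots):
    # One-line form: return result, None if disable_encryption else num_slots
    # Wrapped form, identical behavior:
    return result, (
        None if disable_encryption else num_slots
    )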
