
Jean/efficient bootstrapping #3

Merged: 43 commits, Jan 23, 2024

Commits (43)
167fa42  trying (jeandut, Jan 9, 2024)
095b67b  not working because of weird class vs instance issue (jeandut, Jan 9, 2024)
d8266e6  getting nowhere (jeandut, Jan 9, 2024)
be02744  errors start to make more sense (jeandut, Jan 11, 2024)
71dc969  implicit self (jeandut, Jan 11, 2024)
e59dff6  supposedly working if no reinstantiation (jeandut, Jan 12, 2024)
9ed1e9d  weird bugs still... (jeandut, Jan 13, 2024)
1ed5ee5  fixing init of algo (jeandut, Jan 14, 2024)
46f668e  iterating (jeandut, Jan 15, 2024)
7e63934  making slow progress (jeandut, Jan 15, 2024)
196c5c6  it runs! (jeandut, Jan 15, 2024)
7557813  fixing criterion (jeandut, Jan 16, 2024)
8a6b748  fixing predict (jeandut, Jan 16, 2024)
17aa1ff  fixing metric (jeandut, Jan 16, 2024)
f6d7550  more robust equality check, but is it needed? (jeandut, Jan 17, 2024)
879f0fc  more strength to the propensity to see actual learning (jeandut, Jan 18, 2024)
1647aa9  starting up tests (jeandut, Jan 18, 2024)
43736e5  trying to launch tests (jeandut, Jan 18, 2024)
bd5989a  first CP of tests run (jeandut, Jan 18, 2024)
841108e  tests don't pass, hmm... (jeandut, Jan 18, 2024)
f01aba8  trying to fix bootstrap (jeandut, Jan 18, 2024)
82bef30  bootstrapping is working (jeandut, Jan 18, 2024)
480709d  trying to make linting better (jeandut, Jan 18, 2024)
0ce62e0  trying to make linting better (jeandut, Jan 18, 2024)
bd5f184  trying to make linting better (jeandut, Jan 18, 2024)
4ce64d1  trying to make linting better (jeandut, Jan 18, 2024)
9ecb8ea  trying to make linting better (jeandut, Jan 18, 2024)
e5e4828  adding better docstring (jeandut, Jan 18, 2024)
80632cd  trying to make docs pass (jeandut, Jan 18, 2024)
cba499c  trying to make docs pass (jeandut, Jan 18, 2024)
5fdb6e9  adding doc and fixing it (jeandut, Jan 18, 2024)
15cfa36  trying to make sphinx pass (jeandut, Jan 18, 2024)
af1ad2f  trying to fix CI (jeandut, Jan 18, 2024)
4787a5e  trying to make CI pass (jeandut, Jan 18, 2024)
8dc5001  upping version (jeandut, Jan 19, 2024)
9f1f2ee  trying to install the correct version (jeandut, Jan 19, 2024)
a4bc853  fixing data error (jeandut, Jan 22, 2024)
2a73a01  first test working (jeandut, Jan 22, 2024)
3b3eec4  fixing linting (jeandut, Jan 22, 2024)
a73bdff  accommodating new update_from_checkpoint function (jeandut, Jan 22, 2024)
390a466  Update setup.py (jeandut, Jan 22, 2024)
d773739  accommodating substrafl API change (jeandut, Jan 23, 2024)
aeaf189  new API compliance (jeandut, Jan 23, 2024)
4 changes: 4 additions & 0 deletions docs/source/api/strategies.rst
@@ -5,4 +5,8 @@ fedeca.strategies

 .. autoclass:: fedeca.strategies.WebDisco

+.. automodule:: fedeca.strategies.bootstraper
+
+.. automodule:: fedeca.strategies.webdisco_utils
+

1 change: 1 addition & 0 deletions fedeca/__init__.py
@@ -1,3 +1,4 @@
"""Top level package for :mod:`fedeca`."""
from .fedeca_core import FedECA
from .fedeca_core import LogisticRegressionTorch
from .competitors import PooledIPTW, MatchingAjudsted, NaiveComparison
24 changes: 13 additions & 11 deletions fedeca/algorithms/torch_dp_fed_avg_algo.py
@@ -1,5 +1,6 @@
 """Differentially private algorithm to be used with FedAvg strategy."""
 import logging
+import random
 from typing import Any, Optional

 import numpy as np
@@ -371,28 +372,26 @@ def _get_state_to_save(self) -> dict:

         return checkpoint

-    def _update_from_checkpoint(self, path) -> dict:
+    def _update_from_checkpoint(self, checkpoint: dict) -> None:
         """Set self attributes using saved values.

         Parameters
         ----------
-        path : Path
-            Path towards the checkpoint to use.
-
-        Returns
-        -------
-        dict
-            The emptied checkpoint.
+        checkpoint : dict
+            Checkpoint to load.
         """
-        # One cannot simply call checkpoint = super()._update_from_checkpoint(path)
+        # One cannot simply call checkpoint = super()._update_from_checkpoint(chkpt)
         # because we have to change the model class if it should be changed
         # (and optimizer) aka if we find a specific key in the checkpoint
-        assert (
-            path.is_file()
-        ), f'Cannot load the model - does not exist {list(path.parent.glob("*"))}'
-        checkpoint = torch.load(path, map_location=self._device)
-
         # For some reason substrafl save and load client before calling train
         if "privacy_accountant_state_dict" in checkpoint:
-
             self.accountant = RDPAccountant()
             self.accountant.load_state_dict(
                 checkpoint.pop("privacy_accountant_state_dict")
@@ -429,10 +428,13 @@ def _update_from_checkpoint(self, path) -> dict:

         self._index_generator = checkpoint.pop("index_generator")

+        random.setstate(checkpoint.pop("random_rng_state"))
+        np.random.set_state(checkpoint.pop("numpy_rng_state"))
+
         if self._device == torch.device("cpu"):
-            torch.set_rng_state(checkpoint.pop("rng_state").to(self._device))
+            torch.set_rng_state(checkpoint.pop("torch_rng_state").to(self._device))
         else:
-            torch.cuda.set_rng_state(checkpoint.pop("rng_state").to("cpu"))
+            torch.cuda.set_rng_state(checkpoint.pop("torch_rng_state").to("cpu"))

         attr_names = [
             "dp_max_grad_norm",
@@ -447,4 +449,4 @@ def _update_from_checkpoint(self, path) -> dict:
         for attr in attr_names:
             setattr(self, attr, checkpoint.pop(attr))

-        return checkpoint
+        return
16 changes: 5 additions & 11 deletions fedeca/algorithms/torch_webdisco_algo.py
@@ -3,7 +3,6 @@
 import os
 from copy import deepcopy
 from math import sqrt
-from pathlib import Path
 from typing import Any, List, Optional

 # hello
@@ -547,23 +546,18 @@ def _get_state_to_save(self) -> dict:
         checkpoint.update({"global_moments": self.global_moments})
         return checkpoint

-    def _update_from_checkpoint(self, path: Path) -> dict:
+    def _update_from_checkpoint(self, checkpoint: dict) -> None:
         """Load the local state from the checkpoint.

         Parameters
         ----------
-        path : pathlib.Path
-            Path where the checkpoint is saved
-
-        Returns
-        -------
-        dict
-            Checkpoint
+        checkpoint : dict
+            The checkpoint to load.
         """
-        checkpoint = super()._update_from_checkpoint(path=path)
+        super()._update_from_checkpoint(checkpoint=checkpoint)
         self.server_state = checkpoint.pop("server_state")
         self.global_moments = checkpoint.pop("global_moments")
-        return checkpoint
+        return

     def summary(self):
         """Summary of the class to be exposed in the experiment summary file.
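Both algorithms now share the same checkpoint contract: _get_state_to_save builds a dict, each subclass's _update_from_checkpoint pops only the keys it owns and delegates the rest to super(), and nothing is returned because the dict is emptied in place. A minimal sketch of that chaining pattern, with hypothetical class and attribute names:

class BaseAlgo:
    # Sketch of the new contract; names here are illustrative only.
    def __init__(self):
        self._index_generator = 0

    def _get_state_to_save(self) -> dict:
        return {"index_generator": self._index_generator}

    def _update_from_checkpoint(self, checkpoint: dict) -> None:
        # Consume this level's keys with pop(); return nothing.
        self._index_generator = checkpoint.pop("index_generator")


class WebDiscoLikeAlgo(BaseAlgo):
    def __init__(self):
        super().__init__()
        self.server_state = {}

    def _get_state_to_save(self) -> dict:
        checkpoint = super()._get_state_to_save()
        checkpoint.update({"server_state": self.server_state})
        return checkpoint

    def _update_from_checkpoint(self, checkpoint: dict) -> None:
        # Delegate shared keys upward, then pop what this subclass owns.
        super()._update_from_checkpoint(checkpoint=checkpoint)
        self.server_state = checkpoint.pop("server_state")


algo = WebDiscoLikeAlgo()
state = algo._get_state_to_save()
algo._update_from_checkpoint(state)  # the state dict is emptied in place
assert state == {}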
1 change: 1 addition & 0 deletions fedeca/strategies/__init__.py
@@ -1,2 +1,3 @@
"""Init file for strategies."""
from .webdisco import WebDisco
from .bootstraper import make_bootstrap_metric_function, make_bootstrap_strategy
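The newly exported helpers make_bootstrap_strategy and make_bootstrap_metric_function wrap an existing strategy and its metrics so that training and evaluation are replayed over seeded resamples. As a generic, NumPy-only illustration of the seeded bootstrap mechanism these wrappers rely on (not the fedeca API itself):

import numpy as np


def bootstrap_metric(y_true, y_pred, metric, seeds):
    # Recompute `metric` on index-resampled copies of the data, one per
    # seed: a plain illustration of seeded bootstrapping, not fedeca code.
    estimates = []
    for seed in seeds:
        rng = np.random.default_rng(seed)
        idx = rng.integers(0, len(y_true), size=len(y_true))  # with replacement
        estimates.append(metric(y_true[idx], y_pred[idx]))
    return np.asarray(estimates)


def accuracy(t, p):
    return float((t == p).mean())


y_true = np.array([1, 0, 1, 1, 0, 1, 0, 0])
y_pred = np.array([1, 0, 1, 0, 0, 1, 1, 0])

est = bootstrap_metric(y_true, y_pred, accuracy, seeds=range(100))
print(est.mean(), np.percentile(est, [2.5, 97.5]))  # point estimate and 95% CI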