Merge branch 'main' into context2task
panh99 authored Jan 24, 2024
2 parents 14be434 + 472f42a commit c81fd43
Showing 6 changed files with 19 additions and 15 deletions.
2 changes: 1 addition & 1 deletion examples/advanced-pytorch/client.py
@@ -110,7 +110,7 @@ def main() -> None:
    )
    parser.add_argument(
        "--toy",
-        action='store_true',
+        action="store_true",
        help="Set to true to quicky run the client using only 10 datasamples. \
        Useful for testing purposes. Default: False",
    )
2 changes: 1 addition & 1 deletion examples/advanced-pytorch/server.py
@@ -76,7 +76,7 @@ def main():
    parser = argparse.ArgumentParser(description="Flower")
    parser.add_argument(
        "--toy",
-        action='store_true',
+        action="store_true",
        help="Set to true to use only 10 datasamples for validation. \
        Useful for testing purposes. Default: False",
    )
24 changes: 14 additions & 10 deletions examples/advanced-pytorch/utils.py
@@ -28,18 +28,21 @@ def load_centralized_data():

def apply_transforms(batch):
    """Apply transforms to the partition from FederatedDataset."""
-    pytorch_transforms = Compose([
-        Resize(256),
-        CenterCrop(224),
-        ToTensor(),
-        Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
-    ])
+    pytorch_transforms = Compose(
+        [
+            Resize(256),
+            CenterCrop(224),
+            ToTensor(),
+            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
+        ]
+    )
    batch["img"] = [pytorch_transforms(img) for img in batch["img"]]
    return batch


-def train(net, trainloader, valloader, epochs,
-          device: torch.device = torch.device("cpu")):
+def train(
+    net, trainloader, valloader, epochs, device: torch.device = torch.device("cpu")
+):
    """Train the network on the training set."""
    print("Starting training...")
    net.to(device)  # move model to GPU if available
@@ -71,8 +74,9 @@ def train(net, trainloader, valloader, epochs,
    return results


-def test(net, testloader, steps: int = None,
-         device: torch.device = torch.device("cpu")):
+def test(
+    net, testloader, steps: int = None, device: torch.device = torch.device("cpu")
+):
    """Validate the network on the entire test set."""
    print("Starting evalutation...")
    net.to(device)  # move model to GPU if available
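For context, here is a minimal sketch of how a helper like apply_transforms above is typically hooked into a Flower Datasets pipeline. The dataset name, partition count, and batch size below are illustrative assumptions, not part of this commit:

# Illustrative sketch only (assumed dataset name, partition count, batch size).
from flwr_datasets import FederatedDataset
from torch.utils.data import DataLoader

fds = FederatedDataset(dataset="cifar10", partitioners={"train": 10})
partition = fds.load_partition(0, "train")
# with_transform applies apply_transforms lazily, batch by batch, at access time
partition = partition.with_transform(apply_transforms)
trainloader = DataLoader(partition, batch_size=32)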
2 changes: 1 addition & 1 deletion examples/advanced-tensorflow/client.py
@@ -86,7 +86,7 @@ def main() -> None:
    )
    parser.add_argument(
        "--toy",
-        action='store_true',
+        action="store_true",
        help="Set to true to quicky run the client using only 10 datasamples. "
        "Useful for testing purposes. Default: False",
    )
2 changes: 1 addition & 1 deletion examples/quickstart-huggingface/client.py
@@ -119,7 +119,7 @@ def evaluate(self, parameters, config):
        required=True,
        type=int,
        help="Partition of the dataset divided into 1,000 iid partitions created "
-             "artificially.",
+        "artificially.",
    )
    node_id = parser.parse_args().node_id
    main(node_id)
2 changes: 1 addition & 1 deletion examples/vertical-fl/README.md
@@ -295,7 +295,7 @@ class ServerModel(nn.Module):

It comprises a single linear layer that accepts the concatenated outputs from
all client models as its input. The number of inputs to this layer equals the
-total number of outputs from the client models ( $3 \times 4 = 12$ ). After processing
+total number of outputs from the client models (3 x 4 = 12). After processing
these inputs, the linear layer's output is passed through a sigmoid activation
function (`nn.Sigmoid()`), which maps the result to a `(0, 1)` range, providing
a probability score indicative of the likelihood of survival.
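As a rough illustration of the model described above, a minimal sketch of a single-layer server model with 12 inputs and a sigmoid output follows; it is based only on the README text, not copied from the example's code:

import torch
import torch.nn as nn


class ServerModel(nn.Module):
    def __init__(self, input_size: int = 12):  # 3 clients x 4 outputs each
        super().__init__()
        self.fc = nn.Linear(input_size, 1)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x is the concatenation of all client outputs; result lies in (0, 1)
        return self.sigmoid(self.fc(x))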
