Commit 1f3ef97: Fix linter issues

stevelaskaridis committed Oct 21, 2023 (1 parent: 090c31d)
Showing 11 changed files with 110 additions and 93 deletions.
10 changes: 5 additions & 5 deletions baselines/fjord/fjord/client.py
@@ -81,7 +81,7 @@ def get_agg_config(
         )
         net(images, sampler=max_sampler)
         for i, layer in enumerate(layers):
-            if isinstance(layer, ODConv2d) or isinstance(layer, ODLinear):
+            if isinstance(layer, (ODConv2d, ODLinear)):
                 config[p][i]["in_dim"] = layer.last_input_dim
                 config[p][i]["out_dim"] = layer.last_output_dim
             elif isinstance(layer, ODBatchNorm2d):
@@ -179,8 +179,8 @@ def fit(
         :param parameters: The parameters of the model.
         :param config: The train configuration.
-        :return: The parameters of the model, the number of
-            samples used for training, and the training metrics
+        :return: The parameters of the model, the number of samples used for training,
+            and the training metrics
         """
         Logger.get().info(
             f"Training on client {self.cid} for round "
@@ -225,8 +225,8 @@ def evaluate(
         :param parameters: The parameters of the model.
         :param config: The eval configuration.
-        :return: The loss on the test set, the number of samples
-            used for evaluation, and the evaluation metrics.
+        :return: The loss on the test set, the number of samples used for evaluation,
+            and the evaluation metrics.
         """
         Logger.get().info(
             f"Evaluating on client {self.cid} for round "
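Note: the first hunk applies pylint's consider-merging-isinstance (R1701) hint. isinstance accepts a tuple of types, so a chain of or-ed checks collapses into one call with identical behavior. A minimal standalone sketch of the pattern (made-up function, not the FjORD code):

    # flagged by pylint: consider-merging-isinstance
    def is_number_old(x):
        return isinstance(x, int) or isinstance(x, float)

    # preferred: isinstance takes a tuple of types
    def is_number(x):
        return isinstance(x, (int, float))

    assert is_number(3) and is_number(2.5) and not is_number("3")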
8 changes: 4 additions & 4 deletions baselines/fjord/fjord/dataset.py
@@ -87,10 +87,10 @@ def __init__( # pylint: disable=too-many-arguments
         :param root: Root directory of dataset
         :param train: If True, creates dataset from training set
-        :param transform: A function/transform that takes in an PIL image
-            and returns a transformed version.
-        :param target_transform: A function/transform that takes in the
-            target and transforms it.
+        :param transform: A function/transform that takes in an PIL image and returns a
+            transformed version.
+        :param target_transform: A function/transform that takes in the target and
+            transforms it.
         :param download: If true, downloads the dataset from the internet.
         """
         super().__init__(
1 change: 1 addition & 0 deletions baselines/fjord/fjord/main.py
@@ -191,6 +191,7 @@ def sample(
         for p in self.p_s:
             if remainder == 0:
                 break
+            cid = random.choice(max_p_to_cids[p])
             while cid not in selected_cids:
                 cid = random.choice(max_p_to_cids[p])
             selected_cids.add(cid)
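Note: the added line initializes cid before the while condition reads it; previously the name could be unbound on the loop's first check, which pylint reports (used-before-assignment) and which would raise NameError at runtime. A standalone sketch of the initialize-before-test pattern (names made up, and the sets chosen so the loop terminates):

    import random

    random.seed(0)
    pool = ["a", "b", "c"]
    allowed = {"a", "b"}

    # Before the fix, `while pick not in allowed` would read `pick`
    # before any assignment and raise NameError on the first check.
    pick = random.choice(pool)  # initialize before the first condition check
    while pick not in allowed:  # resample until the pick is acceptable
        pick = random.choice(pool)
    print(pick)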
24 changes: 11 additions & 13 deletions baselines/fjord/fjord/models.py
@@ -40,11 +40,11 @@ def __init__(
             padding=1,
             bias=False,
         )
-        self.bn1 = create_bn_layer(od, p_s, planes)
+        self.bn1 = create_bn_layer(od=od, p_s=p_s, num_features=planes)
         self.conv2 = create_conv_layer(
             od, True, planes, planes, kernel_size=3, stride=1, padding=1, bias=False
         )
-        self.bn2 = create_bn_layer(od, p_s, planes)
+        self.bn2 = create_bn_layer(od=od, p_s=p_s, num_features=planes)

         self.shortcut = SequentialWithSampler()
         if stride != 1 or in_planes != self.expansion * planes:
@@ -58,7 +58,7 @@ def __init__(
                     stride=stride,
                     bias=False,
                 ),
-                create_bn_layer(od, p_s, self.expansion * planes),
+                create_bn_layer(od=od, p_s=p_s, num_features=self.expansion * planes),
             )

     def forward(self, x, sampler):
@@ -107,7 +107,7 @@ def __init__(
         self.conv1 = create_conv_layer(
             od, True, 3, 64, kernel_size=3, stride=1, padding=1, bias=False
         )
-        self.bn1 = create_bn_layer(od, p_s, 64)
+        self.bn1 = create_bn_layer(od=od, p_s=p_s, num_features=64)
         self.layer1 = self._make_layer(od, p_s, block, 64, num_blocks[0], stride=1)
         self.layer2 = self._make_layer(od, p_s, block, 128, num_blocks[1], stride=2)
         self.layer3 = self._make_layer(od, p_s, block, 256, num_blocks[2], stride=2)
@@ -119,8 +119,8 @@ def _make_layer(
     ):  # pylint: disable=too-many-arguments
         strides = [stride] + [1] * (num_blocks - 1)
         layers = []
-        for stride in strides:
-            layers.append(block(od, p_s, self.in_planes, planes, stride))
+        for strd in strides:
+            layers.append(block(od, p_s, self.in_planes, planes, strd))
         self.in_planes = planes * block.expansion
         return SequentialWithSampler(*layers)
@@ -140,7 +140,7 @@ def forward(self, x, sampler=None):
             out = self.layer2(out, sampler=sampler)
             out = self.layer3(out, sampler=sampler)
             out = self.layer4(out, sampler=sampler)
-            out = F.avg_pool2d(out, 4)
+            out = F.avg_pool2d(out, 4)  # pylint: disable=not-callable
             out = out.view(out.size(0), -1)
             out = self.linear(out)
         else:
@@ -149,7 +149,7 @@ def forward(self, x, sampler=None):
             out = self.layer2(out)
             out = self.layer3(out)
             out = self.layer4(out)
-            out = F.avg_pool2d(out, 4)
+            out = F.avg_pool2d(out, 4)  # pylint: disable=not-callable
             out = out.view(out.size(0), -1)
             out = self.linear(out)
         return out
@@ -200,8 +200,7 @@ def train( # pylint: disable=too-many-locals, too-many-arguments
     :param net: The model to train.
     :param trainloader: The training set.
-    :param know_distill: Whether the model being trained
-        uses knowledge distillation.
+    :param know_distill: Whether the model being trained uses knowledge distillation.
     :param max_p: The maximum p value.
     :param current_round: The current round of training.
     :param total_rounds: The total number of rounds of training.
@@ -308,13 +307,12 @@ def get_lr_scheduler(
     :param optimiser: The optimiser for which to get the scheduler.
     :param total_epochs: The total number of epochs.
-    :param method: The method to use for the scheduler.
-        Supports static and cifar10.
+    :param method: The method to use for the scheduler. Supports static and cifar10.
     :return: The learning rate scheduler.
     """
     if method == "static":
        return MultiStepLR(optimiser, [total_epochs + 1])
-    elif method == "cifar10":
+    if method == "cifar10":
        return MultiStepLR(
            optimiser, [int(0.5 * total_epochs), int(0.75 * total_epochs)], gamma=0.1
        )
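Note: two recurring linter patterns in this file. In _make_layer the loop variable is renamed so it no longer shadows the stride argument (pylint redefined-argument-from-local, R1704), and in get_lr_scheduler an elif after a returning branch becomes a plain if (pylint no-else-return, R1705), which is behavior-preserving because the early return already exits. A standalone sketch of the no-else-return rewrite, with made-up functions:

    # flagged: `elif` is redundant after a branch that returns
    def sign_old(x: int) -> str:
        if x > 0:
            return "positive"
        elif x < 0:
            return "negative"
        return "zero"

    # preferred: each test stands alone once the previous branch returns
    def sign(x: int) -> str:
        if x > 0:
            return "positive"
        if x < 0:
            return "negative"
        return "zero"

    assert sign(3) == "positive" and sign(-1) == "negative" and sign(0) == "zero"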
2 changes: 1 addition & 1 deletion baselines/fjord/fjord/od/layers/batch_norm.py
@@ -13,10 +13,10 @@ class ODBatchNorm2d(nn.Module): # pylint: disable=too-many-instance-attributes

     def __init__(
         self,
-        *args,
         p_s: List[float],
         num_features: int,
         affine: Optional[bool] = True,
+        *args,
         **kwargs,
     ) -> None:
         super().__init__()
42 changes: 29 additions & 13 deletions baselines/fjord/fjord/od/layers/conv.py
@@ -13,6 +13,14 @@
 def od_conv_forward(
     layer: Module, x: Tensor, p: Optional[Union[Tuple[Module, float], float]] = None
 ) -> Tensor:
+    """Ordered dropout forward pass for convolution networks.
+
+    Args:
+    :param layer: The layer being forwarded.
+    :param x: Input tensor.
+    :param p: Tuple of layer and p or p.
+    :return: Output of forward pass.
+    """
     p = check_layer(layer, p)
     if not layer.is_od and p is not None:
         raise ValueError("p must be None if is_od is False")
@@ -26,7 +34,9 @@ def od_conv_forward(
     # subsampled weights and bias
     weights_red = layer.weight[:out_dim, :in_dim]
     bias_red = layer.bias[:out_dim] if layer.bias is not None else None
-    return layer._conv_forward(x, weights_red, bias_red)
+    return layer._conv_forward(  # pylint: disable=protected-access
+        x, weights_red, bias_red
+    )


 def get_slice(layer: Module, in_dim: int, out_dim: int) -> Tuple[Tensor, Tensor]:
@@ -46,24 +56,26 @@ def get_slice(layer: Module, in_dim: int, out_dim: int) -> Tuple[Tensor, Tensor]:
 class ODConv1d(nn.Conv1d):
     """Ordered Dropout Conv1d."""

-    def __init__(self, is_od: bool = True, *args, **kwargs) -> None:
+    def __init__(self, *args, is_od: bool = True, **kwargs) -> None:
         self.is_od = is_od
         super().__init__(*args, **kwargs)
         self.width = self.out_channels
         self.last_input_dim = None
         self.last_output_dim = None

     def forward(
-        self, x: Tensor, p: Optional[Union[Tuple[Module, float], float]] = None
+        self,
+        input: Tensor,  # pylint: disable=redefined-builtin
+        p: Optional[Union[Tuple[Module, float], float]] = None,
     ) -> Tensor:
         """Forward pass.

         Args:
-        :param x: Input tensor.
+        :param input: Input tensor.
         :param p: Tuple of layer and p or p.
         :return: Output of forward pass.
         """
-        return od_conv_forward(self, x, p)
+        return od_conv_forward(self, input, p)

     def get_slice(self, *args, **kwargs) -> Tuple[Tensor, Tensor]:
         """Get slice of weights and bias."""
@@ -73,24 +85,26 @@ def get_slice(self, *args, **kwargs) -> Tuple[Tensor, Tensor]:
 class ODConv2d(nn.Conv2d):
     """Ordered Dropout Conv2d."""

-    def __init__(self, is_od: bool = True, *args, **kwargs) -> None:
+    def __init__(self, *args, is_od: bool = True, **kwargs) -> None:
         self.is_od = is_od
         super().__init__(*args, **kwargs)
         self.width = self.out_channels
         self.last_input_dim = None
         self.last_output_dim = None

     def forward(
-        self, x: Tensor, p: Optional[Union[Tuple[Module, float], float]] = None
+        self,
+        input: Tensor,  # pylint: disable=redefined-builtin
+        p: Optional[Union[Tuple[Module, float], float]] = None,
     ) -> Tensor:
         """Forward pass.

         Args:
-        :param x: Input tensor.
+        :param input: Input tensor.
         :param p: Tuple of layer and p or p.
         :return: Output of forward pass.
         """
-        return od_conv_forward(self, x, p)
+        return od_conv_forward(self, input, p)

     def get_slice(self, *args, **kwargs) -> Tuple[Tensor, Tensor]:
         """Get slice of weights and bias."""
@@ -100,24 +114,26 @@ def get_slice(self, *args, **kwargs) -> Tuple[Tensor, Tensor]:
 class ODConv3d(nn.Conv3d):
     """Ordered Dropout Conv3d."""

-    def __init__(self, is_od: bool = True, *args, **kwargs) -> None:
+    def __init__(self, *args, is_od: bool = True, **kwargs) -> None:
         self.is_od = is_od
         super().__init__(*args, **kwargs)
         self.width = self.out_channels
         self.last_input_dim = None
         self.last_output_dim = None

     def forward(
-        self, x: Tensor, p: Optional[Union[Tuple[Module, float], float]] = None
+        self,
+        input: Tensor,  # pylint: disable=redefined-builtin
+        p: Optional[Union[Tuple[Module, float], float]] = None,
    ) -> Tensor:
         """Forward pass.

         Args:
-        :param x: Input tensor.
+        :param input: Input tensor.
         :param p: Tuple of layer and p or p.
         :return: Output of forward pass.
         """
-        return od_conv_forward(self, x, p)
+        return od_conv_forward(self, input, p)

     def get_slice(self, *args, **kwargs) -> Tuple[Tensor, Tensor]:
         """Get slice of weights and bias."""
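Note: the __init__ changes across these layer classes address pylint's keyword-arg-before-vararg warning (W1113): a parameter with a default placed before *args can be silently captured by a positional argument. Moving it after *args makes it keyword-only, so callers must name it. A standalone sketch with made-up names:

    class Widget:
        # Before (flagged): def __init__(self, flag: bool = True, *args)
        # would bind Widget("a", "b") as flag="a", args=("b",).
        def __init__(self, *args, flag: bool = True):
            self.args = args
            self.flag = flag

    w = Widget("a", "b")          # args=("a", "b"); flag keeps its default
    w2 = Widget("a", flag=False)  # flag can only be set by keyword
    assert w.flag is True and w2.flag is False

The matching factory changes in od/models/utils.py (below) pass is_od by keyword for the same reason.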
12 changes: 7 additions & 5 deletions baselines/fjord/fjord/od/layers/linear.py
@@ -14,27 +14,29 @@
 class ODLinear(nn.Linear):
     """Ordered Dropout Linear."""

-    def __init__(self, is_od: bool = True, *args, **kwargs) -> None:
+    def __init__(self, *args, is_od: bool = True, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.is_od = is_od
         self.width = self.out_features
         self.last_input_dim = None
         self.last_output_dim = None

     def forward(
-        self, x: Tensor, p: Optional[Union[Tuple[Module, float], float]] = None
+        self,
+        input: Tensor,  # pylint: disable=redefined-builtin
+        p: Optional[Union[Tuple[Module, float], float]] = None,
     ) -> Tensor:
         """Forward pass.

         Args:
-        :param x: Input tensor.
+        :param input: Input tensor.
         :param p: Tuple of layer and p or p.
         :return: Output of forward pass.
         """
         if not self.is_od and p is not None:
             raise ValueError("p must be None if is_od is False")
         p = check_layer(self, p)
-        in_dim = x.size(1)  # second dimension is input dimension
+        in_dim = input.size(1)  # second dimension is input dimension
         self.last_input_dim = in_dim
         if not p:  # i.e., don't apply OD
             out_dim = self.width
@@ -44,7 +46,7 @@ def forward(
         # subsampled weights and bias
         weights_red = self.weight[:out_dim, :in_dim]
         bias_red = self.bias[:out_dim] if self.bias is not None else None
-        return F.linear(x, weights_red, bias_red)
+        return F.linear(input, weights_red, bias_red)  # pylint: disable=not-callable

     def get_slice(self, in_dim: int, out_dim: int) -> Tuple[Tensor, Tensor]:
         """Get slice of weights and bias.
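Note: renaming the forward argument from x to input matches the parameter name used by the PyTorch base classes (avoiding pylint's arguments-renamed complaint on overrides), but input shadows the Python builtin, so each signature carries a pylint: disable=redefined-builtin comment. A standalone sketch of why the warning exists and how a scoped disable keeps it local:

    def echo(input):  # pylint: disable=redefined-builtin
        # Inside this function, `input` is the parameter, not the builtin
        # that reads from stdin; the disable comment applies here only.
        return input

    assert echo("hello") == "hello"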
20 changes: 11 additions & 9 deletions baselines/fjord/fjord/od/models/utils.py
@@ -14,7 +14,7 @@ def create_linear_layer(od, is_od, *args, **kwargs):
     :return: nn.Linear or ODLinear
     """
     if od:
-        return ODLinear(is_od, *args, **kwargs)
+        return ODLinear(*args, is_od=is_od, **kwargs)

     return nn.Linear(*args, **kwargs)
@@ -29,7 +29,7 @@ def create_conv_layer(od, is_od, *args, **kwargs):
     :return: nn.Conv2d or ODConv2d
     """
     if od:
-        return ODConv2d(is_od, *args, **kwargs)
+        return ODConv2d(*args, is_od=is_od, **kwargs)

     return nn.Conv2d(*args, **kwargs)
@@ -44,15 +44,17 @@ def create_bn_layer(od, p_s, *args, **kwargs):
     :return: nn.BatchNorm2d or ODBatchNorm2d
     """
     if od:
-        return ODBatchNorm2d(p_s, *args, **kwargs)
+        num_features = kwargs["num_features"]
+        del kwargs["num_features"]
+        return ODBatchNorm2d(*args, p_s=p_s, num_features=num_features, **kwargs)

     return nn.BatchNorm2d(*args, **kwargs)


 class SequentialWithSampler(nn.Sequential):
     """Implements sequential model with sampler."""

-    def forward(self, x, sampler=None):
+    def forward(self, input, sampler=None):  # pylint: disable=redefined-builtin
         """Forward method for custom Sequential.

         :param x: input
@@ -61,13 +63,13 @@ def forward(self, x, sampler=None):
         """
         if sampler is None:
             for module in self:
-                x = module(x)
+                input = module(input)
         else:
             for module in self:
                 if hasattr(module, "od") and module.od:
-                    x = module(x, sampler=sampler)
+                    input = module(input, sampler=sampler)
                 elif hasattr(module, "is_od") and module.is_od:
-                    x = module(x, p=sampler())
+                    input = module(input, p=sampler())
                 else:
-                    x = module(x)
-        return x
+                    input = module(input)
+        return input
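Note: create_bn_layer now extracts num_features from kwargs so it can forward it by keyword to ODBatchNorm2d's reordered signature, while the non-OD branch simply forwards the remaining kwargs to nn.BatchNorm2d, which accepts num_features by keyword. A standalone sketch of the lookup-then-delete step (dict.pop does both in one call; names are made up):

    def split_kwarg(key, **kwargs):
        # Equivalent to: value = kwargs[key]; del kwargs[key]
        value = kwargs.pop(key)
        return value, kwargs

    num_features, rest = split_kwarg("num_features", num_features=64, affine=True)
    assert num_features == 64 and rest == {"affine": True}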