Skip to content

Commit

Permalink
Revert "Remove ReLU from first layer"
Browse files Browse the repository at this point in the history
This reverts commit 0db79b9.
  • Loading branch information
Eve-ning committed Feb 15, 2024
1 parent 0db79b9 commit cde52a5
Showing 1 changed file with 1 addition and 13 deletions.
14 changes: 1 addition & 13 deletions src/frdc/models/inceptionv3.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,24 +4,12 @@
from sklearn.preprocessing import OrdinalEncoder, StandardScaler
from torch import nn
from torchvision.models import Inception_V3_Weights, inception_v3
from torchvision.models.inception import Inception3
from torchvision.models.inception import BasicConv2d, Inception3

from frdc.train.mixmatch_module import MixMatchModule
from frdc.utils.ema import EMA


class BasicConv2d(nn.Module):
    """A Conv2d layer immediately followed by BatchNorm2d.

    Note: unlike torchvision's ``BasicConv2d``, no activation is applied
    after the batch norm — the output is the raw normalized convolution.

    Args:
        in_channels: Number of channels in the input feature map.
        out_channels: Number of channels produced by the convolution.
        **kwargs: Forwarded verbatim to ``nn.Conv2d`` (e.g. ``kernel_size``,
            ``stride``, ``padding``).
    """

    def __init__(self, in_channels: int, out_channels: int, **kwargs) -> None:
        super().__init__()
        # Bias is omitted because BatchNorm2d re-centers the output anyway,
        # making a conv bias redundant.
        self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs)
        self.bn = nn.BatchNorm2d(out_channels, eps=0.001)

    def forward(self, x):
        # conv -> batchnorm, no nonlinearity.
        return self.bn(self.conv(x))


class InceptionV3MixMatchModule(MixMatchModule):
INCEPTION_OUT_DIMS = 2048
INCEPTION_AUX_DIMS = 1000
Expand Down

0 comments on commit cde52a5

Please sign in to comment.