
Commit

added the dropout
aredier committed Nov 7, 2019
1 parent 93822ae commit 5ea72fc
Showing 2 changed files with 129 additions and 0 deletions.
45 changes: 45 additions & 0 deletions .gitignore
@@ -0,0 +1,45 @@
/target
**/*.rs.bk
Cargo.lock
.DS_Stores
.idea/
.cache/
.vscode/
*.pyc
__pycache__/
.python-version
/chariots.egg-info/
/.tox/
/.eggs/
.pytest_cache/
.ipynb_checkpoints/
.DS_Store
node_modules
/dist

# local env files
.env.local
.env.*.local

# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
.editorconfig
yarn.lock

# doc
docs/_build/*

# notebooks
*.ipynb
!project_template/{{cookiecutter.project_name}}/notebooks/*.ipynb
84 changes: 84 additions & 0 deletions monte_carlo_dropout/resnet.py
@@ -72,6 +72,43 @@ def forward(self, x):

        return out

class DropoutBlock(nn.Module):
    """Same as a BasicBlock, but with 2D dropout applied after each ReLU activation."""

    def __init__(self, basic_block: BasicBlock, dropout_rate: float = 0.):
        super(DropoutBlock, self).__init__()
        self.conv1 = basic_block.conv1
        self.bn1 = basic_block.bn1
        self.relu = basic_block.relu
        self.drop1 = nn.Dropout2d(dropout_rate)
        self.conv2 = basic_block.conv2
        self.bn2 = basic_block.bn2
        self.downsample = basic_block.downsample
        self.stride = basic_block.stride
        self.drop2 = nn.Dropout2d(dropout_rate)

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.drop1(out)
        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)
        out = self.drop2(out)

        return out
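
A minimal usage sketch (not part of this commit) of wrapping a single residual block in a DropoutBlock. It assumes the file stays importable as monte_carlo_dropout.resnet and keeps torchvision's BasicBlock signature; the 0.2 rate is an arbitrary illustration.

# Hypothetical usage sketch, not part of this commit.
import torch
from monte_carlo_dropout.resnet import BasicBlock, DropoutBlock

block = BasicBlock(inplanes=64, planes=64)        # a plain residual block
mc_block = DropoutBlock(block, dropout_rate=0.2)  # same weights, with dropout after each ReLU
out = mc_block(torch.randn(1, 64, 56, 56))        # stochastic while the module is in training mode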



class Bottleneck(nn.Module):
    expansion = 4
@@ -215,6 +252,53 @@ def _forward(self, x):
    forward = _forward


class DropoutResnet(nn.Module):
    """Adds dropout to an existing ResNet by wrapping each residual block in a DropoutBlock."""

    def __init__(self, source_resnet: ResNet):
        super(DropoutResnet, self).__init__()
        self._norm_layer = source_resnet._norm_layer

        self.inplanes = source_resnet.inplanes
        self.dilation = source_resnet.dilation
        self.groups = source_resnet.groups
        self.base_width = source_resnet.base_width
        self.conv1 = source_resnet.conv1
        self.bn1 = source_resnet.bn1
        self.relu = source_resnet.relu
        self.maxpool = source_resnet.maxpool
        self.layer1 = self._make_layer(source_resnet.layer1)
        self.layer2 = self._make_layer(source_resnet.layer2)
        self.layer3 = self._make_layer(source_resnet.layer3)
        self.layer4 = self._make_layer(source_resnet.layer4)
        self.avgpool = source_resnet.avgpool
        self.fc = source_resnet.fc

    def _make_layer(self, source_layer: nn.Sequential):
        # wrap every block of the source layer in a DropoutBlock (dropout_rate keeps its default of 0.)
        return nn.Sequential(*[DropoutBlock(block) for block in source_layer.children()])

    def _forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.fc(x)

        return x

    # Allow for accessing the forward method in an inherited class
    forward = _forward


def _resnet(arch, block, layers, pretrained, progress, **kwargs):
    model = ResNet(block, layers, **kwargs)
    if pretrained:
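
For context, a minimal Monte Carlo dropout sketch (not part of this commit) of how DropoutResnet could be used: dropout is kept active at inference time and several stochastic forward passes are aggregated. It assumes the module is importable as monte_carlo_dropout.resnet and exposes a torchvision-style resnet18 constructor, and that the blocks are built with a non-zero dropout rate (as committed, _make_layer leaves DropoutBlock at its default rate of 0.). Since DropoutBlock mirrors BasicBlock's attributes, the wrapping only applies to BasicBlock-based ResNets (resnet18/34).

# Hypothetical Monte Carlo dropout sketch, not part of this commit.
import torch
from torch import nn
from monte_carlo_dropout.resnet import DropoutResnet, resnet18  # resnet18 assumed to exist in this module

mc_model = DropoutResnet(resnet18(pretrained=True))
mc_model.eval()                        # keep batch norm in inference mode
for module in mc_model.modules():
    if isinstance(module, nn.Dropout2d):
        module.train()                 # but keep dropout stochastic

x = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    samples = torch.stack([mc_model(x).softmax(dim=1) for _ in range(20)])
mean_pred = samples.mean(dim=0)        # averaged prediction
uncertainty = samples.var(dim=0)       # per-class predictive variance

The spread of the sampled predictions gives a rough per-class uncertainty estimate, which is the usual motivation for Monte Carlo dropout.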
