-
Notifications
You must be signed in to change notification settings - Fork 323
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Revision models.detection.yolo #851
Changes from 4 commits
cf1646c
fecf88c
b5abc8f
198ebc1
08f17f7
db2601a
fe38bb7
7da9d4a
8c3ed4e
8f69419
b25a864
9ff86ab
17fab64
353f119
a3445ac
189346c
a1d97b6
d5b5fb9
0b4eca4
b52ab5b
eb9930e
fdf38fb
a42cdec
d534cfa
57c9baf
55059f5
bf5b360
3ed65fd
bd23c27
41d2749
0d1c4e7
d758939
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,4 @@ | ||
from typing import Callable, Dict, List, Optional, Tuple | ||
from typing import Callable, Dict, List, Optional, Tuple, Union | ||
|
||
import torch | ||
from pytorch_lightning.utilities.exceptions import MisconfigurationException | ||
|
@@ -464,7 +464,7 @@ def _calculate_losses( | |
class Mish(nn.Module): | ||
"""Mish activation.""" | ||
|
||
def forward(self, x): | ||
def forward(self, x: Tensor) -> Tensor: | ||
return x * torch.tanh(nn.functional.softplus(x)) | ||
|
||
|
||
|
@@ -483,7 +483,7 @@ def __init__(self, source_layers: List[int], num_chunks: int, chunk_idx: int) -> | |
self.num_chunks = num_chunks | ||
self.chunk_idx = chunk_idx | ||
|
||
def forward(self, x, outputs): | ||
def forward(self, x, outputs: List[Union[Tensor, None]]) -> Tensor: | ||
chunks = [torch.chunk(outputs[layer], self.num_chunks, dim=1)[self.chunk_idx] for layer in self.source_layers] | ||
return torch.cat(chunks, dim=1) | ||
|
||
|
@@ -500,5 +500,5 @@ def __init__(self, source_layer: int) -> None: | |
super().__init__() | ||
self.source_layer = source_layer | ||
|
||
def forward(self, x, outputs): | ||
def forward(self, x, outputs: List[Union[Tensor, None]]) -> Tensor: | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We could review why we are passing 'x' to the forward method and doing nothing with it. Seems to be just to keep with the format... There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. @luca-medeiros you're right. RouteLayer and ShortcutLayer do not use |
||
return outputs[-1] + outputs[self.source_layer] |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Same with this 'x' here!