[primTorch] Implement NLL loss reference by rdspring1 · Pull Request #81128 · pytorch/pytorch · GitHub
[go: up one dir, main page]

Skip to content
Closed
Changes from 1 commit
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
2256d36
Initial nll_loss implementation
rdspring1 Jun 29, 2022
b1393e5
fixup
rdspring1 Jun 29, 2022
f72db25
Disable validate_view_consistency check
rdspring1 Jun 29, 2022
055e0e2
Merge 1d and 2d nll_loss functions
rdspring1 Jun 29, 2022
96cc303
Add target class check - disabled because of FakeTensor
rdspring1 Jun 29, 2022
370bc60
refactor helper function
rdspring1 Jul 8, 2022
612ce91
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Sep 25, 2022
e7a3ae4
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Sep 27, 2022
44702b8
Address comments - rnd 1
rdspring1 Sep 27, 2022
c71d746
fixup
rdspring1 Sep 27, 2022
e0554f2
Refactor class weight selection
rdspring1 Sep 28, 2022
6aa6b62
Add comments
rdspring1 Sep 28, 2022
dde53e3
Replace 4-D case for image inputs with general 3-D case
rdspring1 Sep 28, 2022
8000
4df9971
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Sep 28, 2022
39883b6
add comments
rdspring1 Sep 28, 2022
1a635cd
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Sep 28, 2022
590866b
Add class check
rdspring1 Sep 28, 2022
1b88f57
Add FakeTensor Issue
rdspring1 Sep 28, 2022
c59279e
add zero-dim check
rdspring1 Sep 28, 2022
e6d01e4
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Sep 30, 2022
f2c9c3f
Update comments
rdspring1 Sep 30, 2022
10b85ff
fixup
rdspring1 Sep 30, 2022
96a6142
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Oct 3, 2022
6cbdf01
lint
rdspring1 Oct 3, 2022
746a60e
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Oct 11, 2022
e1eb641
Merge branch 'master' of github.com:rdspring1/pytorch into ref_nll_loss
rdspring1 Oct 16, 2022
ef5719e
PR comments
rdspring1 Oct 16, 2022
76bfc80
update test args
rdspring1 Oct 16, 2022
3cd82ab
add type promotion wrapper
rdspring1 Oct 17, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
refactor helper function
  • Loading branch information
rdspring1 committed Jul 8, 2022
commit 370bc6022aeebebbdab512dcaf5b731185ba5e03
37 changes: 20 additions & 17 deletions torch/_refs/nn/functional/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,6 +304,24 @@ def _check_reduction_value(reduction: str):
raise ValueError("{} is not a valid value for reduction".format(reduction))


# This helper function maps depreciated arguments, "size_average" and "reduce"
# to their corresponding "reduction" string argument
def _get_string_reduction_arg(
*, size_average: Optional[bool], reduce: Optional[bool]
) -> str:
if size_average is None:
size_average = True
if reduce is None:
reduce = True
if size_average and reduce:
ret = "mean"
elif reduce:
ret = "sum"
else:
ret = "none"
return ret


@register_decomposition(torch.ops.aten.margin_ranking_loss)
def margin_ranking_loss(
input1: TensorLikeType,
Expand Down Expand Up @@ -350,22 +368,6 @@ def hinge_embedding_loss(
return _apply_loss_reduction(loss, reduction)


def _get_string_reduction_arg(
size_average: Optional[bool], reduce: Optional[bool]
) -> str:
if size_average is None:
size_average = True
if reduce is None:
reduce = True
if size_average and reduce:
ret = "mean"
elif reduce:
ret = "sum"
else:
ret = "none"
return ret


def _nll_loss_nd(
input: TensorLikeType,
target: TensorLikeType,
Expand Down Expand Up @@ -437,6 +439,7 @@ def _nll_loss_nd(
return torch.sum(loss) / torch.sum(current_weight)


@register_decomposition(torch.ops.aten.nll_loss)
def nll_loss(
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

try wrapping with type promotion decorator

input: TensorLikeType,
target: TensorLikeType,
Expand All @@ -449,7 +452,7 @@ def nll_loss(
if size_average is not None or reduce is not None:
# TODO: raise exception instead of converting value
# msg = "size_average and reduce args are deprecated, please use reduction argument."
reduction = _get_string_reduction_arg(size_average, reduce)
reduction = _get_string_reduction_arg(size_average=size_average, reduce=reduce)

if input.ndim == 3 or input.ndim > 4:
# input ndim is == 3 or > 4
Expand Down
0