@@ -1095,6 +1095,51 @@ func.func @test_not_2d(%arg0: !torch.vtensor<[3,4],i1>) -> !torch.vtensor<[3,4],

// -----

+// CHECK-LABEL: func.func @test_nllloss_ii
+func.func @test_nllloss_ii(%arg0: !torch.vtensor<[3,5,2],f32>, %arg1: !torch.vtensor<[3,2],si64>) -> !torch.vtensor<[],f32> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
+  // CHECK: %[[VAL_3:.*]] = torch.constant.none
+  // CHECK: %[[VAL_4:.*]] = torch.constant.int 1
+  // CHECK: %[[VAL_5:.*]] = torch.constant.int 1
+  // CHECK: %[[VAL_6:.*]], %[[VAL_7:.*]] = torch.aten.nll_loss_forward %arg0, %arg1, %[[VAL_3]], %[[VAL_5]], %[[VAL_4]] : !torch.vtensor<[3,5,2],f32>, !torch.vtensor<[3,2],si64>, !torch.none, !torch.int, !torch.int -> !torch.vtensor<[],f32>, !torch.vtensor<[],f32>
+  // CHECK: return %[[VAL_6]] : !torch.vtensor<[],f32>
+  %0 = torch.operator "onnx.NegativeLogLikelihoodLoss"(%arg0, %arg1) {torch.onnx.ignore_index = 1 : si64, torch.onnx.reduction = "mean"} : (!torch.vtensor<[3,5,2],f32>, !torch.vtensor<[3,2],si64>) -> !torch.vtensor<[],f32>
+  return %0 : !torch.vtensor<[],f32>
+}
+
+// CHECK-LABEL: func.func @test_nllloss_ii_ignore_default
+func.func @test_nllloss_ii_ignore_default(%arg0: !torch.vtensor<[3,5,2],f32>, %arg1: !torch.vtensor<[3,2],si64>) -> !torch.vtensor<[],f32> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
+  // CHECK: %[[VAL_3:.*]] = torch.constant.none
+  // CHECK: %[[VAL_4:.*]] = torch.constant.int -100
+  // CHECK: %[[VAL_5:.*]] = torch.constant.int 1
+  // CHECK: %[[VAL_6:.*]], %[[VAL_7:.*]] = torch.aten.nll_loss_forward %arg0, %arg1, %[[VAL_3]], %[[VAL_5]], %[[VAL_4]] : !torch.vtensor<[3,5,2],f32>, !torch.vtensor<[3,2],si64>, !torch.none, !torch.int, !torch.int -> !torch.vtensor<[],f32>, !torch.vtensor<[],f32>
+  // CHECK: return %[[VAL_6]] : !torch.vtensor<[],f32>
+  %0 = torch.operator "onnx.NegativeLogLikelihoodLoss"(%arg0, %arg1) {torch.onnx.reduction = "mean"} : (!torch.vtensor<[3,5,2],f32>, !torch.vtensor<[3,2],si64>) -> !torch.vtensor<[],f32>
+  return %0 : !torch.vtensor<[],f32>
+}
+
+// CHECK-LABEL: func.func @test_nllloss_ii_reduction_sum
+func.func @test_nllloss_ii_reduction_sum(%arg0: !torch.vtensor<[3,5,6,6],f32>, %arg1: !torch.vtensor<[3,6,6],si64>) -> !torch.vtensor<[],f32> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
+  // CHECK: %[[VAL_3:.*]] = torch.constant.none
+  // CHECK: %[[VAL_4:.*]] = torch.constant.int -100
+  // CHECK: %[[VAL_5:.*]] = torch.constant.int 2
+  // CHECK: %[[VAL_6:.*]], %[[VAL_7:.*]] = torch.aten.nll_loss_forward %arg0, %arg1, %[[VAL_3]], %[[VAL_5]], %[[VAL_4]] : !torch.vtensor<[3,5,6,6],f32>, !torch.vtensor<[3,6,6],si64>, !torch.none, !torch.int, !torch.int -> !torch.vtensor<[],f32>, !torch.vtensor<[],f32>
+  // CHECK: return %[[VAL_6]] : !torch.vtensor<[],f32>
+  %0 = torch.operator "onnx.NegativeLogLikelihoodLoss"(%arg0, %arg1) {torch.onnx.reduction = "sum"} : (!torch.vtensor<[3,5,6,6],f32>, !torch.vtensor<[3,6,6],si64>) -> !torch.vtensor<[],f32>
+  return %0 : !torch.vtensor<[],f32>
+}
+
+// CHECK-LABEL: func.func @test_nllloss_iii_reduction_none_ignore_negative
+func.func @test_nllloss_iii_reduction_none_ignore_negative(%arg0: !torch.vtensor<[3,5,6],f32>, %arg1: !torch.vtensor<[3,6],si64>, %arg2: !torch.vtensor<[5],f32>) -> !torch.vtensor<[],f32> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
+  // CHECK: %[[VAL_4:.*]] = torch.constant.int -1
+  // CHECK: %[[VAL_5:.*]] = torch.constant.int 0
+  // CHECK: %[[VAL_6:.*]], %[[VAL_7:.*]] = torch.aten.nll_loss_forward %arg0, %arg1, %arg2, %[[VAL_5]], %[[VAL_4]] : !torch.vtensor<[3,5,6],f32>, !torch.vtensor<[3,6],si64>, !torch.vtensor<[5],f32>, !torch.int, !torch.int -> !torch.vtensor<[],f32>, !torch.vtensor<[],f32>
+  // CHECK: return %[[VAL_6]] : !torch.vtensor<[],f32>
+  %0 = torch.operator "onnx.NegativeLogLikelihoodLoss"(%arg0, %arg1, %arg2) {torch.onnx.ignore_index = -1 : si64, torch.onnx.reduction = "none"} : (!torch.vtensor<[3,5,6],f32>, !torch.vtensor<[3,6],si64>, !torch.vtensor<[5],f32>) -> !torch.vtensor<[],f32>
+  return %0 : !torch.vtensor<[],f32>
+}
+
+// -----
+
// CHECK-LABEL: func.func @test_nonzero
func.func @test_nonzero(%arg0: !torch.vtensor<[3,4,5],f32>) -> !torch.vtensor<[3,4,5],si64> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
  // CHECK: torch.aten.nonzero %arg0 : !torch.vtensor<[3,4,5],f32> -> !torch.vtensor<[3,4,5],si64>
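Note on the new tests: the integer reduction operand the CHECK lines expect follows PyTorch's reduction encoding (0 = none, 1 = mean, 2 = sum), and ignore_index falls back to -100 when the ONNX attribute is omitted. A minimal PyTorch sketch of the semantics the first test exercises (illustrative only, not part of the test file):

    import torch
    import torch.nn.functional as F

    # Mirrors @test_nllloss_ii: log-probability input of shape (3, 5, 2),
    # class-index target of shape (3, 2), reduction="mean", ignore_index=1.
    # The lowered torch.aten.nll_loss_forward produces the same loss value
    # (plus a total_weight tensor) as F.nll_loss here.
    log_probs = torch.log_softmax(torch.randn(3, 5, 2), dim=1)
    target = torch.randint(0, 5, (3, 2))
    loss = F.nll_loss(log_probs, target, ignore_index=1, reduction="mean")
    print(loss.shape)  # rank-0 result, matching !torch.vtensor<[],f32>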