@@ -5,7 +5,7 @@ use crate::fmt;
 use crate::hash::{Hash, Hasher};
 use crate::intrinsics;
 use crate::marker::{Freeze, StructuralPartialEq};
-use crate::ops::{BitOr, BitOrAssign, Div, Neg, Rem};
+use crate::ops::{BitOr, BitOrAssign, Div, DivAssign, Neg, Rem, RemAssign};
 use crate::panic::{RefUnwindSafe, UnwindSafe};
 use crate::ptr;
 use crate::str::FromStr;
@@ -849,6 +849,16 @@ macro_rules! nonzero_integer_signedness_dependent_impls {
             }
         }
 
+        #[stable(feature = "nonzero_div_assign", since = "CURRENT_RUSTC_VERSION")]
+        impl DivAssign<$Ty> for $Int {
+            /// This operation rounds towards zero,
+            /// truncating any fractional part of the exact result, and cannot panic.
+            #[inline]
+            fn div_assign(&mut self, other: $Ty) {
+                *self = *self / other;
+            }
+        }
+
         #[stable(feature = "nonzero_div", since = "1.51.0")]
         impl Rem<$Ty> for $Int {
             type Output = $Int;
@@ -861,6 +871,15 @@ macro_rules! nonzero_integer_signedness_dependent_impls {
                 unsafe { intrinsics::unchecked_rem(self, other.get()) }
             }
         }
+
+        #[stable(feature = "nonzero_div_assign", since = "CURRENT_RUSTC_VERSION")]
+        impl RemAssign<$Ty> for $Int {
+            /// This operation satisfies `n % d == n - (n / d) * d`, and cannot panic.
+            #[inline]
+            fn rem_assign(&mut self, other: $Ty) {
+                *self = *self % other;
+            }
+        }
     };
 
     // Impls for signed nonzero types only.
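
A minimal usage sketch of the `DivAssign`/`RemAssign` impls added above. The concrete pair `u32`/`NonZeroU32` is just one instantiation of the macro, and the example assumes a toolchain in which this change has landed:

```rust
use std::num::NonZeroU32;

fn main() {
    let d = NonZeroU32::new(5).unwrap();

    // `u32 /= NonZeroU32`: the divisor is statically non-zero, so this
    // cannot panic; the result rounds toward zero (17 / 5 == 3).
    let mut n: u32 = 17;
    n /= d;
    assert_eq!(n, 3);

    // `u32 %= NonZeroU32`: satisfies n % d == n - (n / d) * d (17 % 5 == 2).
    let mut m: u32 = 17;
    m %= d;
    assert_eq!(m, 2);
}
```

Because each assign form is written as `*self = *self / other;` (and `%` respectively), it simply delegates to the existing `Div`/`Rem` impls and so inherits their `unchecked_div`/`unchecked_rem` fast path and cannot-panic guarantee.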