Skip to content

Commit 86de320

Browse files
bors[bot] and cuviper committed
Merge #39
39: Make it explicit that serialization is in u32 r=cuviper a=cuviper Add explicit type annotations to make sure we serialize `BigUint` like a `Vec<u32>`. If we ever change the internal representation, we still need to remain compatible with serialized data. Also, deserializing `BigUint` and `BigInt` now uses their normal constructor methods, to make sure they are normalized. We shouldn't assume that all incoming data is already well-formed. Co-authored-by: Josh Stone <[email protected]>
2 parents e22271c + 087c4ec commit 86de320

File tree

4 files changed

+118
-7
lines changed

4 files changed

+118
-7
lines changed

Cargo.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,9 @@ version = "0.4"
4141
optional = true
4242
version = "1.0"
4343

44+
[dev-dependencies]
45+
serde_test = "1.0"
46+
4447
[dev-dependencies.rand]
4548
version = "0.4"
4649

src/bigint.rs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,8 @@ impl serde::Serialize for Sign {
7373
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
7474
where S: serde::Serializer
7575
{
76+
// Note: do not change the serialization format, or it may break
77+
// forward and backward compatibility of serialized data!
7678
match *self {
7779
Sign::Minus => (-1i8).serialize(serializer),
7880
Sign::NoSign => 0i8.serialize(serializer),
@@ -1701,6 +1703,8 @@ impl serde::Serialize for BigInt {
17011703
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
17021704
where S: serde::Serializer
17031705
{
1706+
// Note: do not change the serialization format, or it may break
1707+
// forward and backward compatibility of serialized data!
17041708
(self.sign, &self.data).serialize(serializer)
17051709
}
17061710
}
@@ -1711,10 +1715,7 @@ impl<'de> serde::Deserialize<'de> for BigInt {
17111715
where D: serde::Deserializer<'de>
17121716
{
17131717
let (sign, data) = serde::Deserialize::deserialize(deserializer)?;
1714-
Ok(BigInt {
1715-
sign: sign,
1716-
data: data,
1717-
})
1718+
Ok(BigInt::from_biguint(sign, data))
17181719
}
17191720
}
17201721

src/biguint.rs

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1745,7 +1745,11 @@ impl serde::Serialize for BigUint {
17451745
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
17461746
where S: serde::Serializer
17471747
{
1748-
self.data.serialize(serializer)
1748+
// Note: do not change the serialization format, or it may break forward
1749+
// and backward compatibility of serialized data! If we ever change the
1750+
// internal representation, we should still serialize in base-`u32`.
1751+
let data: &Vec<u32> = &self.data;
1752+
data.serialize(serializer)
17491753
}
17501754
}
17511755

@@ -1754,8 +1758,8 @@ impl<'de> serde::Deserialize<'de> for BigUint {
17541758
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
17551759
where D: serde::Deserializer<'de>
17561760
{
1757-
let data = try!(Vec::deserialize(deserializer));
1758-
Ok(BigUint { data: data })
1761+
let data: Vec<u32> = try!(Vec::deserialize(deserializer));
1762+
Ok(BigUint::new(data))
17591763
}
17601764
}
17611765

tests/serde.rs

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
//! Test serialization and deserialization of `BigUint` and `BigInt`
2+
//!
3+
//! The serialized formats should not change, even if we change our
4+
//! internal representation, because we want to preserve forward and
5+
//! backward compatibility of serialized data!
6+
7+
#![cfg(feature = "serde")]
8+
9+
extern crate num_bigint;
10+
extern crate num_traits;
11+
extern crate serde_test;
12+
13+
use num_bigint::{BigInt, BigUint};
14+
use num_traits::{One, Zero};
15+
use serde_test::{assert_tokens, Token};
16+
17+
#[test]
18+
fn biguint_zero() {
19+
let tokens = [Token::Seq { len: Some(0) }, Token::SeqEnd];
20+
assert_tokens(&BigUint::zero(), &tokens);
21+
}
22+
23+
#[test]
24+
fn bigint_zero() {
25+
let tokens = [
26+
Token::Tuple { len: 2 },
27+
Token::I8(0),
28+
Token::Seq { len: Some(0) },
29+
Token::SeqEnd,
30+
Token::TupleEnd,
31+
];
32+
assert_tokens(&BigInt::zero(), &tokens);
33+
}
34+
35+
#[test]
36+
fn biguint_one() {
37+
let tokens = [Token::Seq { len: Some(1) }, Token::U32(1), Token::SeqEnd];
38+
assert_tokens(&BigUint::one(), &tokens);
39+
}
40+
41+
#[test]
42+
fn bigint_one() {
43+
let tokens = [
44+
Token::Tuple { len: 2 },
45+
Token::I8(1),
46+
Token::Seq { len: Some(1) },
47+
Token::U32(1),
48+
Token::SeqEnd,
49+
Token::TupleEnd,
50+
];
51+
assert_tokens(&BigInt::one(), &tokens);
52+
}
53+
54+
#[test]
55+
fn bigint_negone() {
56+
let tokens = [
57+
Token::Tuple { len: 2 },
58+
Token::I8(-1),
59+
Token::Seq { len: Some(1) },
60+
Token::U32(1),
61+
Token::SeqEnd,
62+
Token::TupleEnd,
63+
];
64+
assert_tokens(&-BigInt::one(), &tokens);
65+
}
66+
67+
// Generated independently from python `hex(factorial(100))`
68+
const FACTORIAL_100: &'static [u32] = &[
69+
0x00000000, 0x00000000, 0x00000000, 0x2735c61a, 0xee8b02ea, 0xb3b72ed2, 0x9420c6ec, 0x45570cca,
70+
0xdf103917, 0x943a321c, 0xeb21b5b2, 0x66ef9a70, 0xa40d16e9, 0x28d54bbd, 0xdc240695, 0x964ec395,
71+
0x1b30,
72+
];
73+
74+
#[test]
75+
fn biguint_factorial_100() {
76+
let n: BigUint = (1u8..101).product();
77+
78+
let mut tokens = vec![];
79+
tokens.push(Token::Seq {
80+
len: Some(FACTORIAL_100.len()),
81+
});
82+
tokens.extend(FACTORIAL_100.iter().map(|&u| Token::U32(u)));
83+
tokens.push(Token::SeqEnd);
84+
85+
assert_tokens(&n, &tokens);
86+
}
87+
88+
#[test]
89+
fn bigint_factorial_100() {
90+
let n: BigInt = (1i8..101).product();
91+
92+
let mut tokens = vec![];
93+
tokens.push(Token::Tuple { len: 2 });
94+
tokens.push(Token::I8(1));
95+
tokens.push(Token::Seq {
96+
len: Some(FACTORIAL_100.len()),
97+
});
98+
tokens.extend(FACTORIAL_100.iter().map(|&u| Token::U32(u)));
99+
tokens.push(Token::SeqEnd);
100+
tokens.push(Token::TupleEnd);
101+
102+
assert_tokens(&n, &tokens);
103+
}

0 commit comments

Comments (0)