//! Test serialization and deserialization of `BigUint` and `BigInt`
//!
//! The serialized formats should not change, even if we change our
//! internal representation, because we want to preserve forward and
//! backward compatibility of serialized data!
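//!
//! As the tests below assert, a `BigUint` is expected to serialize as a
//! sequence of `u32` digits, least-significant first, and a `BigInt` as a
//! two-element tuple of an `i8` sign (-1, 0, or 1) followed by the same
//! digit sequence for its magnitude.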
#![cfg(feature = "serde")]

extern crate num_bigint;
extern crate num_traits;
extern crate serde_test;

use num_bigint::{BigInt, BigUint};
use num_traits::{One, Zero};
use serde_test::{assert_tokens, Token};

#[test]
fn biguint_zero() {
    let tokens = [Token::Seq { len: Some(0) }, Token::SeqEnd];
    assert_tokens(&BigUint::zero(), &tokens);
}

#[test]
fn bigint_zero() {
    let tokens = [
        Token::Tuple { len: 2 },
        Token::I8(0),
        Token::Seq { len: Some(0) },
        Token::SeqEnd,
        Token::TupleEnd,
    ];
    assert_tokens(&BigInt::zero(), &tokens);
}

#[test]
fn biguint_one() {
    let tokens = [Token::Seq { len: Some(1) }, Token::U32(1), Token::SeqEnd];
    assert_tokens(&BigUint::one(), &tokens);
}

#[test]
fn bigint_one() {
    let tokens = [
        Token::Tuple { len: 2 },
        Token::I8(1),
        Token::Seq { len: Some(1) },
        Token::U32(1),
        Token::SeqEnd,
        Token::TupleEnd,
    ];
    assert_tokens(&BigInt::one(), &tokens);
}

#[test]
fn bigint_negone() {
    let tokens = [
        Token::Tuple { len: 2 },
        Token::I8(-1),
        Token::Seq { len: Some(1) },
        Token::U32(1),
        Token::SeqEnd,
        Token::TupleEnd,
    ];
    assert_tokens(&-BigInt::one(), &tokens);
}
// The u32 digits of 100!, least-significant word first, matching the
// expected serialized sequence. Generated independently with Python's
// `hex(factorial(100))`.
const FACTORIAL_100: &'static [u32] = &[
    0x00000000, 0x00000000, 0x00000000, 0x2735c61a, 0xee8b02ea, 0xb3b72ed2, 0x9420c6ec, 0x45570cca,
    0xdf103917, 0x943a321c, 0xeb21b5b2, 0x66ef9a70, 0xa40d16e9, 0x28d54bbd, 0xdc240695, 0x964ec395,
    0x1b30,
];
#[test]
fn biguint_factorial_100() {
    let n: BigUint = (1u8..101).product();

    let mut tokens = vec![];
    tokens.push(Token::Seq {
        len: Some(FACTORIAL_100.len()),
    });
    tokens.extend(FACTORIAL_100.iter().map(|&u| Token::U32(u)));
    tokens.push(Token::SeqEnd);

    assert_tokens(&n, &tokens);
}

#[test]
fn bigint_factorial_100() {
    let n: BigInt = (1i8..101).product();

    let mut tokens = vec![];
    tokens.push(Token::Tuple { len: 2 });
    tokens.push(Token::I8(1));
    tokens.push(Token::Seq {
        len: Some(FACTORIAL_100.len()),
    });
    tokens.extend(FACTORIAL_100.iter().map(|&u| Token::U32(u)));
    tokens.push(Token::SeqEnd);
    tokens.push(Token::TupleEnd);

    assert_tokens(&n, &tokens);
}
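// An additional sketch, not part of the original suite: assuming the same
// (sign, digits) layout exercised by `bigint_negone` and
// `bigint_factorial_100` above, -100! should serialize as an `i8` sign of -1
// followed by the same `FACTORIAL_100` digit sequence. The test name is
// illustrative.
#[test]
fn bigint_negative_factorial_100() {
    let n: BigInt = -(1i8..101).product::<BigInt>();

    let mut tokens = vec![];
    tokens.push(Token::Tuple { len: 2 });
    tokens.push(Token::I8(-1));
    tokens.push(Token::Seq {
        len: Some(FACTORIAL_100.len()),
    });
    tokens.extend(FACTORIAL_100.iter().map(|&u| Token::U32(u)));
    tokens.push(Token::SeqEnd);
    tokens.push(Token::TupleEnd);

    assert_tokens(&n, &tokens);
}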